From 57b9700dafb225d5856fb75aa143efc769b199cf Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 8 Feb 2023 22:41:27 +0000 Subject: [PATCH 1/7] feat: enable "rest" transport in Python for services supporting numeric enums PiperOrigin-RevId: 508143576 Source-Link: https://github.com/googleapis/googleapis/commit/7a702a989db3b413f39ff8994ca53fb38b6928c2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6ad1279c0e7aa787ac6b66c9fd4a210692edffcd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmFkMTI3OWMwZTdhYTc4N2FjNmI2NmM5ZmQ0YTIxMDY5MmVkZmZjZCJ9 --- owl-bot-staging/v2/.coveragerc | 13 + owl-bot-staging/v2/.flake8 | 33 + owl-bot-staging/v2/MANIFEST.in | 2 + owl-bot-staging/v2/README.rst | 49 + owl-bot-staging/v2/docs/conf.py | 376 + .../v2/docs/dlp_v2/dlp_service.rst | 10 + owl-bot-staging/v2/docs/dlp_v2/services.rst | 6 + owl-bot-staging/v2/docs/dlp_v2/types.rst | 6 + owl-bot-staging/v2/docs/index.rst | 7 + .../v2/google/cloud/dlp/__init__.py | 395 + .../v2/google/cloud/dlp/gapic_version.py | 16 + owl-bot-staging/v2/google/cloud/dlp/py.typed | 2 + .../v2/google/cloud/dlp_v2/__init__.py | 396 + .../google/cloud/dlp_v2/gapic_metadata.json | 538 + .../v2/google/cloud/dlp_v2/gapic_version.py | 16 + .../v2/google/cloud/dlp_v2/py.typed | 2 + .../google/cloud/dlp_v2/services/__init__.py | 15 + .../dlp_v2/services/dlp_service/__init__.py | 22 + .../services/dlp_service/async_client.py | 4142 ++++ .../dlp_v2/services/dlp_service/client.py | 4267 ++++ .../dlp_v2/services/dlp_service/pagers.py | 623 + .../dlp_service/transports/__init__.py | 38 + .../services/dlp_service/transports/base.py | 751 + .../services/dlp_service/transports/grpc.py | 1261 ++ .../dlp_service/transports/grpc_asyncio.py | 1260 ++ .../services/dlp_service/transports/rest.py | 4324 ++++ .../v2/google/cloud/dlp_v2/types/__init__.py | 390 + .../v2/google/cloud/dlp_v2/types/dlp.py | 8846 ++++++++ .../v2/google/cloud/dlp_v2/types/storage.py | 1474 ++ owl-bot-staging/v2/mypy.ini | 3 + 
owl-bot-staging/v2/noxfile.py | 184 + ..._dlp_service_activate_job_trigger_async.py | 52 + ...d_dlp_service_activate_job_trigger_sync.py | 52 + ...erated_dlp_service_cancel_dlp_job_async.py | 50 + ...nerated_dlp_service_cancel_dlp_job_sync.py | 50 + ...ervice_create_deidentify_template_async.py | 52 + ...service_create_deidentify_template_sync.py | 52 + ...erated_dlp_service_create_dlp_job_async.py | 52 + ...nerated_dlp_service_create_dlp_job_sync.py | 52 + ...p_service_create_inspect_template_async.py | 52 + ...lp_service_create_inspect_template_sync.py | 52 + ...ed_dlp_service_create_job_trigger_async.py | 56 + ...ted_dlp_service_create_job_trigger_sync.py | 56 + ...p_service_create_stored_info_type_async.py | 52 + ...lp_service_create_stored_info_type_sync.py | 52 + ...ed_dlp_service_deidentify_content_async.py | 51 + ...ted_dlp_service_deidentify_content_sync.py | 51 + ...ervice_delete_deidentify_template_async.py | 50 + ...service_delete_deidentify_template_sync.py | 50 + ...erated_dlp_service_delete_dlp_job_async.py | 50 + ...nerated_dlp_service_delete_dlp_job_sync.py | 50 + ...p_service_delete_inspect_template_async.py | 50 + ...lp_service_delete_inspect_template_sync.py | 50 + ...ed_dlp_service_delete_job_trigger_async.py | 50 + ...ted_dlp_service_delete_job_trigger_sync.py | 50 + ...p_service_delete_stored_info_type_async.py | 50 + ...lp_service_delete_stored_info_type_sync.py | 50 + ...erated_dlp_service_finish_dlp_job_async.py | 50 + ...nerated_dlp_service_finish_dlp_job_sync.py | 50 + ...p_service_get_deidentify_template_async.py | 52 + ...lp_service_get_deidentify_template_sync.py | 52 + ...generated_dlp_service_get_dlp_job_async.py | 52 + ..._generated_dlp_service_get_dlp_job_sync.py | 52 + ..._dlp_service_get_inspect_template_async.py | 52 + ...d_dlp_service_get_inspect_template_sync.py | 52 + ...rated_dlp_service_get_job_trigger_async.py | 52 + ...erated_dlp_service_get_job_trigger_sync.py | 52 + ..._dlp_service_get_stored_info_type_async.py | 52 + 
...d_dlp_service_get_stored_info_type_sync.py | 52 + ...lp_service_hybrid_inspect_dlp_job_async.py | 52 + ...dlp_service_hybrid_inspect_dlp_job_sync.py | 52 + ...ervice_hybrid_inspect_job_trigger_async.py | 52 + ...service_hybrid_inspect_job_trigger_sync.py | 52 + ...rated_dlp_service_inspect_content_async.py | 51 + ...erated_dlp_service_inspect_content_sync.py | 51 + ...service_list_deidentify_templates_async.py | 53 + ..._service_list_deidentify_templates_sync.py | 53 + ...nerated_dlp_service_list_dlp_jobs_async.py | 53 + ...enerated_dlp_service_list_dlp_jobs_sync.py | 53 + ...rated_dlp_service_list_info_types_async.py | 51 + ...erated_dlp_service_list_info_types_sync.py | 51 + ...lp_service_list_inspect_templates_async.py | 53 + ...dlp_service_list_inspect_templates_sync.py | 53 + ...ted_dlp_service_list_job_triggers_async.py | 53 + ...ated_dlp_service_list_job_triggers_sync.py | 53 + ...lp_service_list_stored_info_types_async.py | 53 + ...dlp_service_list_stored_info_types_sync.py | 53 + ...enerated_dlp_service_redact_image_async.py | 51 + ...generated_dlp_service_redact_image_sync.py | 51 + ...ed_dlp_service_reidentify_content_async.py | 52 + ...ted_dlp_service_reidentify_content_sync.py | 52 + ...ervice_update_deidentify_template_async.py | 52 + ...service_update_deidentify_template_sync.py | 52 + ...p_service_update_inspect_template_async.py | 52 + ...lp_service_update_inspect_template_sync.py | 52 + ...ed_dlp_service_update_job_trigger_async.py | 52 + ...ted_dlp_service_update_job_trigger_sync.py | 52 + ...p_service_update_stored_info_type_async.py | 52 + ...lp_service_update_stored_info_type_sync.py | 52 + ...nippet_metadata_google.privacy.dlp.v2.json | 5503 +++++ .../v2/scripts/fixup_dlp_v2_keywords.py | 209 + owl-bot-staging/v2/setup.py | 90 + .../v2/testing/constraints-3.10.txt | 6 + .../v2/testing/constraints-3.11.txt | 6 + .../v2/testing/constraints-3.12.txt | 6 + .../v2/testing/constraints-3.7.txt | 9 + .../v2/testing/constraints-3.8.txt | 6 + 
.../v2/testing/constraints-3.9.txt | 6 + owl-bot-staging/v2/tests/__init__.py | 16 + owl-bot-staging/v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + .../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 + .../unit/gapic/dlp_v2/test_dlp_service.py | 17403 ++++++++++++++++ 113 files changed, 56293 insertions(+) create mode 100644 owl-bot-staging/v2/.coveragerc create mode 100644 owl-bot-staging/v2/.flake8 create mode 100644 owl-bot-staging/v2/MANIFEST.in create mode 100644 owl-bot-staging/v2/README.rst create mode 100644 owl-bot-staging/v2/docs/conf.py create mode 100644 owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst create mode 100644 owl-bot-staging/v2/docs/dlp_v2/services.rst create mode 100644 owl-bot-staging/v2/docs/dlp_v2/types.rst create mode 100644 owl-bot-staging/v2/docs/index.rst create mode 100644 owl-bot-staging/v2/google/cloud/dlp/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py create mode 100644 
owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py create mode 100644 owl-bot-staging/v2/mypy.ini create mode 100644 owl-bot-staging/v2/noxfile.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json create mode 100644 owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py create mode 100644 owl-bot-staging/v2/setup.py create mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v2/tests/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py create mode 
100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc new file mode 100644 index 00000000..76798ec2 --- /dev/null +++ b/owl-bot-staging/v2/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dlp/__init__.py + google/cloud/dlp/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v2/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in new file mode 100644 index 00000000..148f6bf3 --- /dev/null +++ b/owl-bot-staging/v2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/dlp *.py +recursive-include google/cloud/dlp_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst new file mode 100644 index 00000000..cf97c2e7 --- /dev/null +++ b/owl-bot-staging/v2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Dlp API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Dlp API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py new file mode 100644 index 00000000..cf2f570a --- /dev/null +++ b/owl-bot-staging/v2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-dlp documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-dlp" +copyright = u"2022, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dlp-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dlp.tex", + u"google-cloud-dlp Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dlp", + u"Google Cloud Dlp Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dlp", + u"google-cloud-dlp Documentation", + author, + "google-cloud-dlp", + "GAPIC library for Google Cloud Dlp API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst new file mode 100644 index 00000000..914da512 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst @@ -0,0 +1,10 @@ +DlpService +---------------------------- + +.. automodule:: google.cloud.dlp_v2.services.dlp_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v2/docs/dlp_v2/services.rst b/owl-bot-staging/v2/docs/dlp_v2/services.rst new file mode 100644 index 00000000..864a8c83 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Dlp v2 API +==================================== +.. 
toctree:: + :maxdepth: 2 + + dlp_service diff --git a/owl-bot-staging/v2/docs/dlp_v2/types.rst b/owl-bot-staging/v2/docs/dlp_v2/types.rst new file mode 100644 index 00000000..5470b717 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dlp v2 API +================================= + +.. automodule:: google.cloud.dlp_v2.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst new file mode 100644 index 00000000..d119451a --- /dev/null +++ b/owl-bot-staging/v2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + dlp_v2/services + dlp_v2/types diff --git a/owl-bot-staging/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/v2/google/cloud/dlp/__init__.py new file mode 100644 index 00000000..3c1a800c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/__init__.py @@ -0,0 +1,395 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.dlp import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient + +from google.cloud.dlp_v2.types.dlp import Action +from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails +from google.cloud.dlp_v2.types.dlp import BoundingBox +from google.cloud.dlp_v2.types.dlp import BucketingConfig +from google.cloud.dlp_v2.types.dlp import ByteContentItem +from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig +from google.cloud.dlp_v2.types.dlp import CharsToIgnore +from google.cloud.dlp_v2.types.dlp import Color +from google.cloud.dlp_v2.types.dlp import Container +from google.cloud.dlp_v2.types.dlp import ContentItem +from google.cloud.dlp_v2.types.dlp import ContentLocation +from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig +from google.cloud.dlp_v2.types.dlp import CryptoHashConfig +from google.cloud.dlp_v2.types.dlp import CryptoKey +from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig +from google.cloud.dlp_v2.types.dlp import DataProfileAction +from google.cloud.dlp_v2.types.dlp import DataProfileConfigSnapshot +from google.cloud.dlp_v2.types.dlp import DataProfileJobConfig +from google.cloud.dlp_v2.types.dlp import DataProfileLocation +from google.cloud.dlp_v2.types.dlp import DataProfilePubSubCondition +from 
google.cloud.dlp_v2.types.dlp import DataProfilePubSubMessage +from google.cloud.dlp_v2.types.dlp import DataRiskLevel +from google.cloud.dlp_v2.types.dlp import DateShiftConfig +from google.cloud.dlp_v2.types.dlp import DateTime +from google.cloud.dlp_v2.types.dlp import DeidentifyConfig +from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate +from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest +from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import DlpJob +from google.cloud.dlp_v2.types.dlp import DocumentLocation +from google.cloud.dlp_v2.types.dlp import Error +from google.cloud.dlp_v2.types.dlp import ExcludeByHotword +from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes +from google.cloud.dlp_v2.types.dlp import ExclusionRule +from google.cloud.dlp_v2.types.dlp import FieldTransformation +from google.cloud.dlp_v2.types.dlp import Finding +from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest +from google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig +from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest +from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import HybridContentItem +from google.cloud.dlp_v2.types.dlp import HybridFindingDetails +from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest +from google.cloud.dlp_v2.types.dlp import 
HybridInspectJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import HybridInspectResponse +from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics +from google.cloud.dlp_v2.types.dlp import ImageLocation +from google.cloud.dlp_v2.types.dlp import ImageTransformations +from google.cloud.dlp_v2.types.dlp import InfoTypeCategory +from google.cloud.dlp_v2.types.dlp import InfoTypeDescription +from google.cloud.dlp_v2.types.dlp import InfoTypeStats +from google.cloud.dlp_v2.types.dlp import InfoTypeSummary +from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations +from google.cloud.dlp_v2.types.dlp import InspectConfig +from google.cloud.dlp_v2.types.dlp import InspectContentRequest +from google.cloud.dlp_v2.types.dlp import InspectContentResponse +from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails +from google.cloud.dlp_v2.types.dlp import InspectionRule +from google.cloud.dlp_v2.types.dlp import InspectionRuleSet +from google.cloud.dlp_v2.types.dlp import InspectJobConfig +from google.cloud.dlp_v2.types.dlp import InspectResult +from google.cloud.dlp_v2.types.dlp import InspectTemplate +from google.cloud.dlp_v2.types.dlp import JobTrigger +from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse +from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest +from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse +from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse +from google.cloud.dlp_v2.types.dlp import 
ListJobTriggersRequest +from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import Location +from google.cloud.dlp_v2.types.dlp import Manual +from google.cloud.dlp_v2.types.dlp import MetadataLocation +from google.cloud.dlp_v2.types.dlp import OtherInfoTypeSummary +from google.cloud.dlp_v2.types.dlp import OutputStorageConfig +from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation +from google.cloud.dlp_v2.types.dlp import PrivacyMetric +from google.cloud.dlp_v2.types.dlp import ProfileStatus +from google.cloud.dlp_v2.types.dlp import QuasiId +from google.cloud.dlp_v2.types.dlp import QuoteInfo +from google.cloud.dlp_v2.types.dlp import Range +from google.cloud.dlp_v2.types.dlp import RecordCondition +from google.cloud.dlp_v2.types.dlp import RecordLocation +from google.cloud.dlp_v2.types.dlp import RecordSuppression +from google.cloud.dlp_v2.types.dlp import RecordTransformation +from google.cloud.dlp_v2.types.dlp import RecordTransformations +from google.cloud.dlp_v2.types.dlp import RedactConfig +from google.cloud.dlp_v2.types.dlp import RedactImageRequest +from google.cloud.dlp_v2.types.dlp import RedactImageResponse +from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import ReplaceDictionaryConfig +from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig +from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig +from google.cloud.dlp_v2.types.dlp import Schedule +from google.cloud.dlp_v2.types.dlp import StatisticalTable +from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel +from google.cloud.dlp_v2.types.dlp import StoredInfoType +from 
google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion +from google.cloud.dlp_v2.types.dlp import Table +from google.cloud.dlp_v2.types.dlp import TableDataProfile +from google.cloud.dlp_v2.types.dlp import TableLocation +from google.cloud.dlp_v2.types.dlp import TimePartConfig +from google.cloud.dlp_v2.types.dlp import TransformationConfig +from google.cloud.dlp_v2.types.dlp import TransformationDescription +from google.cloud.dlp_v2.types.dlp import TransformationDetails +from google.cloud.dlp_v2.types.dlp import TransformationDetailsStorageConfig +from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling +from google.cloud.dlp_v2.types.dlp import TransformationLocation +from google.cloud.dlp_v2.types.dlp import TransformationOverview +from google.cloud.dlp_v2.types.dlp import TransformationResultStatus +from google.cloud.dlp_v2.types.dlp import TransformationSummary +from google.cloud.dlp_v2.types.dlp import TransientCryptoKey +from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import Value +from google.cloud.dlp_v2.types.dlp import ValueFrequency +from google.cloud.dlp_v2.types.dlp import VersionDescription +from google.cloud.dlp_v2.types.dlp import ContentOption +from google.cloud.dlp_v2.types.dlp import DlpJobType +from google.cloud.dlp_v2.types.dlp import EncryptionStatus +from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy +from google.cloud.dlp_v2.types.dlp import MatchingType +from google.cloud.dlp_v2.types.dlp import MetadataType +from google.cloud.dlp_v2.types.dlp import 
RelationalOperator +from google.cloud.dlp_v2.types.dlp import ResourceVisibility +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState +from google.cloud.dlp_v2.types.dlp import TransformationContainerType +from google.cloud.dlp_v2.types.dlp import TransformationResultStatusType +from google.cloud.dlp_v2.types.dlp import TransformationType +from google.cloud.dlp_v2.types.storage import BigQueryField +from google.cloud.dlp_v2.types.storage import BigQueryKey +from google.cloud.dlp_v2.types.storage import BigQueryOptions +from google.cloud.dlp_v2.types.storage import BigQueryTable +from google.cloud.dlp_v2.types.storage import CloudStorageFileSet +from google.cloud.dlp_v2.types.storage import CloudStorageOptions +from google.cloud.dlp_v2.types.storage import CloudStoragePath +from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet +from google.cloud.dlp_v2.types.storage import CustomInfoType +from google.cloud.dlp_v2.types.storage import DatastoreKey +from google.cloud.dlp_v2.types.storage import DatastoreOptions +from google.cloud.dlp_v2.types.storage import EntityId +from google.cloud.dlp_v2.types.storage import FieldId +from google.cloud.dlp_v2.types.storage import HybridOptions +from google.cloud.dlp_v2.types.storage import InfoType +from google.cloud.dlp_v2.types.storage import Key +from google.cloud.dlp_v2.types.storage import KindExpression +from google.cloud.dlp_v2.types.storage import PartitionId +from google.cloud.dlp_v2.types.storage import RecordKey +from google.cloud.dlp_v2.types.storage import SensitivityScore +from google.cloud.dlp_v2.types.storage import StorageConfig +from google.cloud.dlp_v2.types.storage import StoredType +from google.cloud.dlp_v2.types.storage import TableOptions +from google.cloud.dlp_v2.types.storage import FileType +from google.cloud.dlp_v2.types.storage import Likelihood + +__all__ = ('DlpServiceClient', + 'DlpServiceAsyncClient', + 'Action', + 'ActivateJobTriggerRequest', + 
'AnalyzeDataSourceRiskDetails', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'Color', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateDeidentifyTemplateRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DataProfileAction', + 'DataProfileConfigSnapshot', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + 'DataRiskLevel', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyTemplate', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDlpJobRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeByHotword', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetDeidentifyTemplateRequest', + 'GetDlpJobRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetStoredInfoTypeRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'ImageTransformations', + 'InfoTypeCategory', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeSummary', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListDeidentifyTemplatesRequest', + 
'ListDeidentifyTemplatesResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OtherInfoTypeSummary', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'ProfileStatus', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformation', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'ReplaceDictionaryConfig', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableDataProfile', + 'TableLocation', + 'TimePartConfig', + 'TransformationConfig', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationDetailsStorageConfig', + 'TransformationErrorHandling', + 'TransformationLocation', + 'TransformationOverview', + 'TransformationResultStatus', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateDeidentifyTemplateRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 'VersionDescription', + 'ContentOption', + 'DlpJobType', + 'EncryptionStatus', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'RelationalOperator', + 'ResourceVisibility', + 'StoredInfoTypeState', + 'TransformationContainerType', + 'TransformationResultStatusType', + 'TransformationType', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 
'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'SensitivityScore', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp/py.typed b/owl-bot-staging/v2/google/cloud/dlp/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. 
diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py new file mode 100644 index 00000000..8397a3ad --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dlp_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.dlp_service import DlpServiceClient +from .services.dlp_service import DlpServiceAsyncClient + +from .types.dlp import Action +from .types.dlp import ActivateJobTriggerRequest +from .types.dlp import AnalyzeDataSourceRiskDetails +from .types.dlp import BoundingBox +from .types.dlp import BucketingConfig +from .types.dlp import ByteContentItem +from .types.dlp import CancelDlpJobRequest +from .types.dlp import CharacterMaskConfig +from .types.dlp import CharsToIgnore +from .types.dlp import Color +from .types.dlp import Container +from .types.dlp import ContentItem +from .types.dlp import ContentLocation +from .types.dlp import CreateDeidentifyTemplateRequest +from .types.dlp import CreateDlpJobRequest +from .types.dlp import CreateInspectTemplateRequest +from .types.dlp import CreateJobTriggerRequest +from .types.dlp import CreateStoredInfoTypeRequest +from .types.dlp import CryptoDeterministicConfig +from .types.dlp import CryptoHashConfig +from .types.dlp import CryptoKey +from .types.dlp 
import CryptoReplaceFfxFpeConfig +from .types.dlp import DataProfileAction +from .types.dlp import DataProfileConfigSnapshot +from .types.dlp import DataProfileJobConfig +from .types.dlp import DataProfileLocation +from .types.dlp import DataProfilePubSubCondition +from .types.dlp import DataProfilePubSubMessage +from .types.dlp import DataRiskLevel +from .types.dlp import DateShiftConfig +from .types.dlp import DateTime +from .types.dlp import DeidentifyConfig +from .types.dlp import DeidentifyContentRequest +from .types.dlp import DeidentifyContentResponse +from .types.dlp import DeidentifyTemplate +from .types.dlp import DeleteDeidentifyTemplateRequest +from .types.dlp import DeleteDlpJobRequest +from .types.dlp import DeleteInspectTemplateRequest +from .types.dlp import DeleteJobTriggerRequest +from .types.dlp import DeleteStoredInfoTypeRequest +from .types.dlp import DlpJob +from .types.dlp import DocumentLocation +from .types.dlp import Error +from .types.dlp import ExcludeByHotword +from .types.dlp import ExcludeInfoTypes +from .types.dlp import ExclusionRule +from .types.dlp import FieldTransformation +from .types.dlp import Finding +from .types.dlp import FinishDlpJobRequest +from .types.dlp import FixedSizeBucketingConfig +from .types.dlp import GetDeidentifyTemplateRequest +from .types.dlp import GetDlpJobRequest +from .types.dlp import GetInspectTemplateRequest +from .types.dlp import GetJobTriggerRequest +from .types.dlp import GetStoredInfoTypeRequest +from .types.dlp import HybridContentItem +from .types.dlp import HybridFindingDetails +from .types.dlp import HybridInspectDlpJobRequest +from .types.dlp import HybridInspectJobTriggerRequest +from .types.dlp import HybridInspectResponse +from .types.dlp import HybridInspectStatistics +from .types.dlp import ImageLocation +from .types.dlp import ImageTransformations +from .types.dlp import InfoTypeCategory +from .types.dlp import InfoTypeDescription +from .types.dlp import InfoTypeStats +from .types.dlp 
import InfoTypeSummary +from .types.dlp import InfoTypeTransformations +from .types.dlp import InspectConfig +from .types.dlp import InspectContentRequest +from .types.dlp import InspectContentResponse +from .types.dlp import InspectDataSourceDetails +from .types.dlp import InspectionRule +from .types.dlp import InspectionRuleSet +from .types.dlp import InspectJobConfig +from .types.dlp import InspectResult +from .types.dlp import InspectTemplate +from .types.dlp import JobTrigger +from .types.dlp import KmsWrappedCryptoKey +from .types.dlp import LargeCustomDictionaryConfig +from .types.dlp import LargeCustomDictionaryStats +from .types.dlp import ListDeidentifyTemplatesRequest +from .types.dlp import ListDeidentifyTemplatesResponse +from .types.dlp import ListDlpJobsRequest +from .types.dlp import ListDlpJobsResponse +from .types.dlp import ListInfoTypesRequest +from .types.dlp import ListInfoTypesResponse +from .types.dlp import ListInspectTemplatesRequest +from .types.dlp import ListInspectTemplatesResponse +from .types.dlp import ListJobTriggersRequest +from .types.dlp import ListJobTriggersResponse +from .types.dlp import ListStoredInfoTypesRequest +from .types.dlp import ListStoredInfoTypesResponse +from .types.dlp import Location +from .types.dlp import Manual +from .types.dlp import MetadataLocation +from .types.dlp import OtherInfoTypeSummary +from .types.dlp import OutputStorageConfig +from .types.dlp import PrimitiveTransformation +from .types.dlp import PrivacyMetric +from .types.dlp import ProfileStatus +from .types.dlp import QuasiId +from .types.dlp import QuoteInfo +from .types.dlp import Range +from .types.dlp import RecordCondition +from .types.dlp import RecordLocation +from .types.dlp import RecordSuppression +from .types.dlp import RecordTransformation +from .types.dlp import RecordTransformations +from .types.dlp import RedactConfig +from .types.dlp import RedactImageRequest +from .types.dlp import RedactImageResponse +from .types.dlp import 
ReidentifyContentRequest +from .types.dlp import ReidentifyContentResponse +from .types.dlp import ReplaceDictionaryConfig +from .types.dlp import ReplaceValueConfig +from .types.dlp import ReplaceWithInfoTypeConfig +from .types.dlp import RiskAnalysisJobConfig +from .types.dlp import Schedule +from .types.dlp import StatisticalTable +from .types.dlp import StorageMetadataLabel +from .types.dlp import StoredInfoType +from .types.dlp import StoredInfoTypeConfig +from .types.dlp import StoredInfoTypeStats +from .types.dlp import StoredInfoTypeVersion +from .types.dlp import Table +from .types.dlp import TableDataProfile +from .types.dlp import TableLocation +from .types.dlp import TimePartConfig +from .types.dlp import TransformationConfig +from .types.dlp import TransformationDescription +from .types.dlp import TransformationDetails +from .types.dlp import TransformationDetailsStorageConfig +from .types.dlp import TransformationErrorHandling +from .types.dlp import TransformationLocation +from .types.dlp import TransformationOverview +from .types.dlp import TransformationResultStatus +from .types.dlp import TransformationSummary +from .types.dlp import TransientCryptoKey +from .types.dlp import UnwrappedCryptoKey +from .types.dlp import UpdateDeidentifyTemplateRequest +from .types.dlp import UpdateInspectTemplateRequest +from .types.dlp import UpdateJobTriggerRequest +from .types.dlp import UpdateStoredInfoTypeRequest +from .types.dlp import Value +from .types.dlp import ValueFrequency +from .types.dlp import VersionDescription +from .types.dlp import ContentOption +from .types.dlp import DlpJobType +from .types.dlp import EncryptionStatus +from .types.dlp import InfoTypeSupportedBy +from .types.dlp import MatchingType +from .types.dlp import MetadataType +from .types.dlp import RelationalOperator +from .types.dlp import ResourceVisibility +from .types.dlp import StoredInfoTypeState +from .types.dlp import TransformationContainerType +from .types.dlp import 
TransformationResultStatusType +from .types.dlp import TransformationType +from .types.storage import BigQueryField +from .types.storage import BigQueryKey +from .types.storage import BigQueryOptions +from .types.storage import BigQueryTable +from .types.storage import CloudStorageFileSet +from .types.storage import CloudStorageOptions +from .types.storage import CloudStoragePath +from .types.storage import CloudStorageRegexFileSet +from .types.storage import CustomInfoType +from .types.storage import DatastoreKey +from .types.storage import DatastoreOptions +from .types.storage import EntityId +from .types.storage import FieldId +from .types.storage import HybridOptions +from .types.storage import InfoType +from .types.storage import Key +from .types.storage import KindExpression +from .types.storage import PartitionId +from .types.storage import RecordKey +from .types.storage import SensitivityScore +from .types.storage import StorageConfig +from .types.storage import StoredType +from .types.storage import TableOptions +from .types.storage import FileType +from .types.storage import Likelihood + +__all__ = ( + 'DlpServiceAsyncClient', +'Action', +'ActivateJobTriggerRequest', +'AnalyzeDataSourceRiskDetails', +'BigQueryField', +'BigQueryKey', +'BigQueryOptions', +'BigQueryTable', +'BoundingBox', +'BucketingConfig', +'ByteContentItem', +'CancelDlpJobRequest', +'CharacterMaskConfig', +'CharsToIgnore', +'CloudStorageFileSet', +'CloudStorageOptions', +'CloudStoragePath', +'CloudStorageRegexFileSet', +'Color', +'Container', +'ContentItem', +'ContentLocation', +'ContentOption', +'CreateDeidentifyTemplateRequest', +'CreateDlpJobRequest', +'CreateInspectTemplateRequest', +'CreateJobTriggerRequest', +'CreateStoredInfoTypeRequest', +'CryptoDeterministicConfig', +'CryptoHashConfig', +'CryptoKey', +'CryptoReplaceFfxFpeConfig', +'CustomInfoType', +'DataProfileAction', +'DataProfileConfigSnapshot', +'DataProfileJobConfig', +'DataProfileLocation', +'DataProfilePubSubCondition', 
+'DataProfilePubSubMessage', +'DataRiskLevel', +'DatastoreKey', +'DatastoreOptions', +'DateShiftConfig', +'DateTime', +'DeidentifyConfig', +'DeidentifyContentRequest', +'DeidentifyContentResponse', +'DeidentifyTemplate', +'DeleteDeidentifyTemplateRequest', +'DeleteDlpJobRequest', +'DeleteInspectTemplateRequest', +'DeleteJobTriggerRequest', +'DeleteStoredInfoTypeRequest', +'DlpJob', +'DlpJobType', +'DlpServiceClient', +'DocumentLocation', +'EncryptionStatus', +'EntityId', +'Error', +'ExcludeByHotword', +'ExcludeInfoTypes', +'ExclusionRule', +'FieldId', +'FieldTransformation', +'FileType', +'Finding', +'FinishDlpJobRequest', +'FixedSizeBucketingConfig', +'GetDeidentifyTemplateRequest', +'GetDlpJobRequest', +'GetInspectTemplateRequest', +'GetJobTriggerRequest', +'GetStoredInfoTypeRequest', +'HybridContentItem', +'HybridFindingDetails', +'HybridInspectDlpJobRequest', +'HybridInspectJobTriggerRequest', +'HybridInspectResponse', +'HybridInspectStatistics', +'HybridOptions', +'ImageLocation', +'ImageTransformations', +'InfoType', +'InfoTypeCategory', +'InfoTypeDescription', +'InfoTypeStats', +'InfoTypeSummary', +'InfoTypeSupportedBy', +'InfoTypeTransformations', +'InspectConfig', +'InspectContentRequest', +'InspectContentResponse', +'InspectDataSourceDetails', +'InspectJobConfig', +'InspectResult', +'InspectTemplate', +'InspectionRule', +'InspectionRuleSet', +'JobTrigger', +'Key', +'KindExpression', +'KmsWrappedCryptoKey', +'LargeCustomDictionaryConfig', +'LargeCustomDictionaryStats', +'Likelihood', +'ListDeidentifyTemplatesRequest', +'ListDeidentifyTemplatesResponse', +'ListDlpJobsRequest', +'ListDlpJobsResponse', +'ListInfoTypesRequest', +'ListInfoTypesResponse', +'ListInspectTemplatesRequest', +'ListInspectTemplatesResponse', +'ListJobTriggersRequest', +'ListJobTriggersResponse', +'ListStoredInfoTypesRequest', +'ListStoredInfoTypesResponse', +'Location', +'Manual', +'MatchingType', +'MetadataLocation', +'MetadataType', +'OtherInfoTypeSummary', +'OutputStorageConfig', 
+'PartitionId', +'PrimitiveTransformation', +'PrivacyMetric', +'ProfileStatus', +'QuasiId', +'QuoteInfo', +'Range', +'RecordCondition', +'RecordKey', +'RecordLocation', +'RecordSuppression', +'RecordTransformation', +'RecordTransformations', +'RedactConfig', +'RedactImageRequest', +'RedactImageResponse', +'ReidentifyContentRequest', +'ReidentifyContentResponse', +'RelationalOperator', +'ReplaceDictionaryConfig', +'ReplaceValueConfig', +'ReplaceWithInfoTypeConfig', +'ResourceVisibility', +'RiskAnalysisJobConfig', +'Schedule', +'SensitivityScore', +'StatisticalTable', +'StorageConfig', +'StorageMetadataLabel', +'StoredInfoType', +'StoredInfoTypeConfig', +'StoredInfoTypeState', +'StoredInfoTypeStats', +'StoredInfoTypeVersion', +'StoredType', +'Table', +'TableDataProfile', +'TableLocation', +'TableOptions', +'TimePartConfig', +'TransformationConfig', +'TransformationContainerType', +'TransformationDescription', +'TransformationDetails', +'TransformationDetailsStorageConfig', +'TransformationErrorHandling', +'TransformationLocation', +'TransformationOverview', +'TransformationResultStatus', +'TransformationResultStatusType', +'TransformationSummary', +'TransformationType', +'TransientCryptoKey', +'UnwrappedCryptoKey', +'UpdateDeidentifyTemplateRequest', +'UpdateInspectTemplateRequest', +'UpdateJobTriggerRequest', +'UpdateStoredInfoTypeRequest', +'Value', +'ValueFrequency', +'VersionDescription', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json new file mode 100644 index 00000000..634002d4 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json @@ -0,0 +1,538 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dlp_v2", + "protoPackage": "google.privacy.dlp.v2", + "schema": "1.0", + "services": { + "DlpService": { + "clients": { + "grpc": { + 
"libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + 
"list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DlpServiceAsyncClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + 
"get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "rest": { + "libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + 
"methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ 
b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py new file mode 100644 index 00000000..e8e1c384 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py new file mode 100644 index 00000000..aa9c062a --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DlpServiceClient +from .async_client import DlpServiceAsyncClient + +__all__ = ( + 'DlpServiceClient', + 'DlpServiceAsyncClient', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py new file mode 100644 index 00000000..ca29f0c5 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -0,0 +1,4142 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dlp_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .client import DlpServiceClient + + +class DlpServiceAsyncClient: + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. 
+ """ + + _client: DlpServiceClient + + DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT + + deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) + parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) + dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) + parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) + dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) + parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) + finding_path = staticmethod(DlpServiceClient.finding_path) + parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) + inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) + parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) + job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) + parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) + stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) + parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) + common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DlpServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DlpServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DlpServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DlpServiceClient.common_project_path) + parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) + common_location_path = 
staticmethod(DlpServiceClient.common_location_path) + parse_common_location_path = staticmethod(DlpServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DlpServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(DlpServiceClient).get_transport_class, type(DlpServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DlpServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. 
+ client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DlpServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def inspect_content(self, + request: Optional[Union[dlp.InspectContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. 
+ For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = await client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.inspect_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def redact_image(self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = await client.redact_image(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]]): + The request object. Request to search for potentially + sensitive info in an image and redact it by covering it + with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.redact_image, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def deidentify_content(self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = await client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]]): + The request object. Request to de-identify a + ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.deidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reidentify_content(self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]]): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_info_types(self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = await client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]]): + The request object. Request for the list of infoTypes. + parent (:class:`str`): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_inspect_template(self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]]): + The request object. Request message for + CreateInspectTemplate. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_inspect_template(self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]]): + The request object. Request message for + UpdateInspectTemplate. + name (:class:`str`): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_inspect_template(self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]]): + The request object. Request message for + GetInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_inspect_templates(self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesAsyncPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]]): + The request object. Request message for + ListInspectTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListInspectTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_inspect_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListInspectTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_inspect_template(self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_inspect_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]]): + The request object. Request message for + DeleteInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_deidentify_template(self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]]): + The request object. Request message for + CreateDeidentifyTemplate. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_deidentify_template(self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deidentify_template(self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. 
+ See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]]): + The request object. Request message for + GetDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_deidentify_templates(self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesAsyncPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dlp_v2
+
+            async def sample_list_deidentify_templates():
+                # Create a client
+                client = dlp_v2.DlpServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dlp_v2.ListDeidentifyTemplatesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_deidentify_templates(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]]):
+                The request object. Request message for
+                ListDeidentifyTemplates.
+            parent (:class:`str`):
+                Required. Parent resource name.
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+                -  Organizations scope, location specified:
+                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                -  Organizations scope, no location specified (defaults
+                   to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListDeidentifyTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deidentify_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListDeidentifyTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_deidentify_template(self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_deidentify_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]]): + The request object. Request message for + DeleteDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_job_trigger(self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = await client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]]): + The request object. Request message for + CreateJobTrigger. + parent (:class:`str`): + Required. Parent resource name. 
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`):
+                Required. The JobTrigger to create.
+                This corresponds to the ``job_trigger`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.JobTrigger:
+                Contains a configuration to make dlp
+                api calls on a repeating basis. See
+                https://cloud.google.com/dlp/docs/concepts-job-triggers
+                to learn more.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, job_trigger])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = dlp.CreateJobTriggerRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_job_trigger(self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]]): + The request object. Request message for + UpdateJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def hybrid_inspect_job_trigger(self, + request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.HybridInspectJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_trigger(self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]]): + The request object. Request message for GetJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_job_triggers(self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersAsyncPager: + r"""Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dlp_v2
+
+            async def sample_list_job_triggers():
+                # Create a client
+                client = dlp_v2.DlpServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dlp_v2.ListJobTriggersRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_job_triggers(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]]):
+                The request object. Request message for ListJobTriggers.
+            parent (:class:`str`):
+                Required. Parent resource name.
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager:
+                Response message for ListJobTriggers.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListJobTriggersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_triggers, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTriggersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job_trigger(self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. 
+ See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_trigger(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]]): + The request object. Request message for + DeleteJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def activate_job_trigger(self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.activate_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]]): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.activate_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_dlp_job(self, + request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_job: Optional[dlp.InspectJobConfig] = None, + risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]]): + The request object. Request message for + CreateDlpJobRequest. Used to initiate long running jobs + such as calculating risk metrics or inspecting Google + Cloud Storage. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_dlp_jobs(self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsAsyncPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]]): + The request object. The request message for listing DLP + jobs. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListDlpJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_dlp_jobs, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDlpJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_dlp_job(self, + request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]]): + The request object. The request message for + [DlpJobs.GetDlpJob][]. + name (:class:`str`): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dlp_job(self, + request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. 
+ See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]]): + The request object. The request message for deleting a + DLP job. + name (:class:`str`): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_dlp_job(self, + request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]]): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_stored_info_type(self, + request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]]): + The request object. Request message for + CreateStoredInfoType. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): + Required. Configuration of the + storedInfoType to create. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_stored_info_type, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_stored_info_type(self, + request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.update_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]]): + The request object. Request message for + UpdateStoredInfoType. + name (:class:`str`): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_stored_info_type, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_stored_info_type(self, + request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. 
+ See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]]): + The request object. Request message for + GetStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_stored_info_types(self, + request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListStoredInfoTypesAsyncPager: + r"""Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]]): + The request object. Request message for + ListStoredInfoTypes. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListStoredInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_stored_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListStoredInfoTypesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_stored_info_type(self, + request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_stored_info_type(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]]): + The request object. Request message for + DeleteStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def hybrid_inspect_dlp_job(self, + request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.HybridInspectDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def finish_dlp_job(self, + request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.finish_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]]): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.finish_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DlpServiceAsyncClient", +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py new file mode 100644 index 00000000..42eacf85 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py @@ -0,0 +1,4267 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.dlp_v2 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DlpServiceGrpcTransport +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .transports.rest import DlpServiceRestTransport + + +class DlpServiceClientMeta(type): + """Metaclass for the DlpService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] + _transport_registry["grpc"] = DlpServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DlpServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DlpServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DlpServiceClient(metaclass=DlpServiceClientMeta): + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dlp.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def deidentify_template_path(organization: str,deidentify_template: str,) -> str: + """Returns a fully-qualified deidentify_template string.""" + return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) + + @staticmethod + def parse_deidentify_template_path(path: str) -> Dict[str,str]: + """Parses a deidentify_template path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def dlp_content_path(project: str,) -> str: + """Returns a fully-qualified dlp_content string.""" + return "projects/{project}/dlpContent".format(project=project, ) + + @staticmethod + def parse_dlp_content_path(path: str) -> Dict[str,str]: + """Parses a dlp_content path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpContent$", path) + return m.groupdict() if m else {} + + @staticmethod + def dlp_job_path(project: str,dlp_job: str,) -> str: + """Returns a fully-qualified dlp_job string.""" + return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) + + @staticmethod + def parse_dlp_job_path(path: str) -> Dict[str,str]: + """Parses a dlp_job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def finding_path(project: str,location: str,finding: str,) -> str: + """Returns a fully-qualified finding string.""" + return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) + + @staticmethod + def parse_finding_path(path: str) -> Dict[str,str]: + """Parses a finding path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", path) + return m.groupdict() if m else {} + + 
@staticmethod + def inspect_template_path(organization: str,inspect_template: str,) -> str: + """Returns a fully-qualified inspect_template string.""" + return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) + + @staticmethod + def parse_inspect_template_path(path: str) -> Dict[str,str]: + """Parses a inspect_template path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def job_trigger_path(project: str,job_trigger: str,) -> str: + """Returns a fully-qualified job_trigger string.""" + return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) + + @staticmethod + def parse_job_trigger_path(path: str) -> Dict[str,str]: + """Parses a job_trigger path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def stored_info_type_path(organization: str,stored_info_type: str,) -> str: + """Returns a fully-qualified stored_info_type string.""" + return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) + + @staticmethod + def parse_stored_info_type_path(path: str) -> Dict[str,str]: + """Parses a stored_info_type path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/storedInfoTypes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = 
re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DlpServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DlpServiceTransport): + # transport is a DlpServiceTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def inspect_content(self, + request: Optional[Union[dlp.InspectContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.InspectContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.InspectContentRequest): + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.inspect_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def redact_image(self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = client.redact_image(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]): + The request object. Request to search for potentially + sensitive info in an image and redact it by covering it + with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.RedactImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.RedactImageRequest): + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.redact_image] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def deidentify_content(self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]): + The request object. Request to de-identify a + ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeidentifyContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeidentifyContentRequest): + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.deidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def reidentify_content(self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ReidentifyContentRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ReidentifyContentRequest): + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_info_types(self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]): + The request object. Request for the list of infoTypes. + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.ListInfoTypesRequest): + request = dlp.ListInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_info_types] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_inspect_template(self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]): + The request object. Request message for + CreateInspectTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateInspectTemplateRequest): + request = dlp.CreateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_inspect_template(self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]): + The request object. Request message for + UpdateInspectTemplate. + name (str): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateInspectTemplateRequest): + request = dlp.UpdateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_inspect_template(self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]): + The request object. Request message for + GetInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetInspectTemplateRequest): + request = dlp.GetInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_inspect_templates(self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]): + The request object. Request message for + ListInspectTemplates. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInspectTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListInspectTemplatesRequest): + request = dlp.ListInspectTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInspectTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_inspect_template(self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_inspect_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]): + The request object. Request message for + DeleteInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteInspectTemplateRequest): + request = dlp.DeleteInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_deidentify_template(self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]): + The request object. Request message for + CreateDeidentifyTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): + request = dlp.CreateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_deidentify_template(self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (str): + Required. 
Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): + request = dlp.UpdateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deidentify_template(self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]): + The request object. Request message for + GetDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDeidentifyTemplateRequest): + request = dlp.GetDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_deidentify_templates(self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]): + The request object. Request message for + ListDeidentifyTemplates. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDeidentifyTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): + request = dlp.ListDeidentifyTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeidentifyTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_deidentify_template(self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_deidentify_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]): + The request object. Request message for + DeleteDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): + request = dlp.DeleteDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_job_trigger(self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]): + The request object. Request message for + CreateJobTrigger. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateJobTriggerRequest): + request = dlp.CreateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_job_trigger(self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]): + The request object. Request message for + UpdateJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateJobTriggerRequest): + request = dlp.UpdateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def hybrid_inspect_job_trigger(self, + request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectJobTriggerRequest): + request = dlp.HybridInspectJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_job_trigger(self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]): + The request object. Request message for GetJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. 
See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetJobTriggerRequest): + request = dlp.GetJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_job_triggers(self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersPager: + r"""Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]): + The request object. Request message for ListJobTriggers. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: + Response message for ListJobTriggers. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListJobTriggersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListJobTriggersRequest): + request = dlp.ListJobTriggersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobTriggersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_job_trigger(self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + client.delete_job_trigger(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]): + The request object. Request message for + DeleteJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteJobTriggerRequest): + request = dlp.DeleteJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def activate_job_trigger(self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.activate_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ActivateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ActivateJobTriggerRequest): + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_dlp_job(self, + request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_job: Optional[dlp.InspectJobConfig] = None, + risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]): + The request object. Request message for + CreateDlpJobRequest. Used to initiate long running jobs + such as calculating risk metrics or inspecting Google + Cloud Storage. + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDlpJobRequest): + request = dlp.CreateDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_dlp_jobs(self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]): + The request object. The request message for listing DLP + jobs. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: + The response message for listing DLP + jobs. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDlpJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDlpJobsRequest): + request = dlp.ListDlpJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDlpJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def get_dlp_job(self,
+            request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.DlpJob:
+        r"""Gets the latest state of a long-running DlpJob.
+
+        See https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis to learn
+        more.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]):
+                The request object. The request message for
+                [DlpJobs.GetDlpJob][].
+            name (str):
+                Required. The name of the DlpJob resource. Corresponds to
+                the ``name`` field on ``request``; may not be combined
+                with an explicit ``request``.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.DlpJob:
+                Combines all of the information about a DLP job.
+        """
+        # Flattened keyword arguments may not be combined with an explicit
+        # request object; reject the ambiguous call up front.
+        if request is not None and any([name]):
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Coerce dicts (or any non-proto input) into the proto-plus request
+        # type, then fold in the flattened field.
+        if not isinstance(request, dlp.GetDlpJobRequest):
+            request = dlp.GetDlpJobRequest(request)
+        if name is not None:
+            request.name = name
+
+        # The transport pre-wraps each method with retry/timeout defaults
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_dlp_job]
+
+        # Route the call by resource name via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        metadata = tuple(metadata) + (header,)
+
+        return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def delete_dlp_job(self,
+            request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a long-running DlpJob.
+
+        This method indicates that the client is no longer interested in
+        the DlpJob result. The job will be canceled if possible. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis to learn
+        more.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]):
+                The request object. The request message for deleting a
+                DLP job.
+            name (str):
+                Required. The name of the DlpJob resource to be deleted.
+                Corresponds to the ``name`` field on ``request``; may not
+                be combined with an explicit ``request``.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Flattened keyword arguments may not be combined with an explicit
+        # request object; reject the ambiguous call up front.
+        if request is not None and any([name]):
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Coerce into the proto-plus request type, then apply the
+        # flattened field.
+        if not isinstance(request, dlp.DeleteDlpJobRequest):
+            request = dlp.DeleteDlpJobRequest(request)
+        if name is not None:
+            request.name = name
+
+        # The transport pre-wraps each method with retry/timeout defaults.
+        rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job]
+
+        # Route the call by resource name via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        metadata = tuple(metadata) + (header,)
+
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def cancel_dlp_job(self,
+            request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Starts asynchronous cancellation on a long-running DlpJob.
+
+        The server makes a best effort to cancel the DlpJob, but success
+        is not guaranteed. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis to learn
+        more.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]):
+                The request object. The request message for canceling a
+                DLP job.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # This RPC exposes no flattened fields; only coerce the request.
+        if not isinstance(request, dlp.CancelDlpJobRequest):
+            request = dlp.CancelDlpJobRequest(request)
+
+        # The transport pre-wraps each method with retry/timeout defaults.
+        rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job]
+
+        # Route the call by resource name via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        metadata = tuple(metadata) + (header,)
+
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def create_stored_info_type(self,
+            request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            config: Optional[dlp.StoredInfoTypeConfig] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.StoredInfoType:
+        r"""Creates a pre-built stored infoType to be used for inspection.
+
+        See https://cloud.google.com/dlp/docs/creating-stored-infotypes
+        to learn more.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]):
+                The request object. Request message for
+                CreateStoredInfoType.
+            parent (str):
+                Required. Parent resource name, e.g.
+                ``projects/PROJECT_ID``, ``organizations/ORG_ID``, or
+                either with a ``/locations/LOCATION_ID`` suffix, such as
+                ``projects/example-project/locations/europe-west3``.
+                Corresponds to the ``parent`` field on ``request``; may
+                not be combined with an explicit ``request``.
+            config (google.cloud.dlp_v2.types.StoredInfoTypeConfig):
+                Required. Configuration of the storedInfoType to create.
+                Corresponds to the ``config`` field on ``request``; may
+                not be combined with an explicit ``request``.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.StoredInfoType:
+                StoredInfoType resource message that contains information
+                about the current version and any pending updates.
+        """
+        # Flattened keyword arguments may not be combined with an explicit
+        # request object; reject the ambiguous call up front.
+        if request is not None and any([parent, config]):
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Coerce into the proto-plus request type, then apply the
+        # flattened fields.
+        if not isinstance(request, dlp.CreateStoredInfoTypeRequest):
+            request = dlp.CreateStoredInfoTypeRequest(request)
+        if parent is not None:
+            request.parent = parent
+        if config is not None:
+            request.config = config
+
+        # The transport pre-wraps each method with retry/timeout defaults.
+        rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type]
+
+        # Route the call by parent resource via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),))
+        metadata = tuple(metadata) + (header,)
+
+        return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def update_stored_info_type(self,
+            request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            config: Optional[dlp.StoredInfoTypeConfig] = None,
+            update_mask: Optional[field_mask_pb2.FieldMask] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.StoredInfoType:
+        r"""Updates the stored infoType by creating a new version.
+
+        The existing version will continue to be used until the new
+        version is ready. See
+        https://cloud.google.com/dlp/docs/creating-stored-infotypes
+        to learn more.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]):
+                The request object. Request message for
+                UpdateStoredInfoType.
+            name (str):
+                Required. Resource name of organization and
+                storedInfoType to be updated, for example
+                ``organizations/433245324/storedInfoTypes/432452342`` or
+                projects/project-id/storedInfoTypes/432452342.
+                Corresponds to the ``name`` field on ``request``; may not
+                be combined with an explicit ``request``.
+            config (google.cloud.dlp_v2.types.StoredInfoTypeConfig):
+                Updated configuration for the storedInfoType. If not
+                provided, a new version of the storedInfoType will be
+                created with the existing configuration. Corresponds to
+                the ``config`` field on ``request``.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Mask to control which fields get updated. Corresponds to
+                the ``update_mask`` field on ``request``.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.StoredInfoType:
+                StoredInfoType resource message that contains information
+                about the current version and any pending updates.
+        """
+        # Flattened keyword arguments may not be combined with an explicit
+        # request object; reject the ambiguous call up front.
+        if request is not None and any([name, config, update_mask]):
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Coerce into the proto-plus request type, then apply the
+        # flattened fields.
+        if not isinstance(request, dlp.UpdateStoredInfoTypeRequest):
+            request = dlp.UpdateStoredInfoTypeRequest(request)
+        if name is not None:
+            request.name = name
+        if config is not None:
+            request.config = config
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # The transport pre-wraps each method with retry/timeout defaults.
+        rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type]
+
+        # Route the call by resource name via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        metadata = tuple(metadata) + (header,)
+
+        return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def get_stored_info_type(self,
+            request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.StoredInfoType:
+        r"""Gets a stored infoType.
+
+        See https://cloud.google.com/dlp/docs/creating-stored-infotypes
+        to learn more.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]):
+                The request object. Request message for
+                GetStoredInfoType.
+            name (str):
+                Required. Resource name of the organization and
+                storedInfoType to be read, for example
+                ``organizations/433245324/storedInfoTypes/432452342`` or
+                projects/project-id/storedInfoTypes/432452342.
+                Corresponds to the ``name`` field on ``request``; may not
+                be combined with an explicit ``request``.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.StoredInfoType:
+                StoredInfoType resource message that contains information
+                about the current version and any pending updates.
+        """
+        # Flattened keyword arguments may not be combined with an explicit
+        # request object; reject the ambiguous call up front.
+        if request is not None and any([name]):
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Coerce into the proto-plus request type, then apply the
+        # flattened field.
+        if not isinstance(request, dlp.GetStoredInfoTypeRequest):
+            request = dlp.GetStoredInfoTypeRequest(request)
+        if name is not None:
+            request.name = name
+
+        # The transport pre-wraps each method with retry/timeout defaults.
+        rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type]
+
+        # Route the call by resource name via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        metadata = tuple(metadata) + (header,)
+
+        return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def list_stored_info_types(self,
+            request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListStoredInfoTypesPager:
+        r"""Lists stored infoTypes.
+
+        See https://cloud.google.com/dlp/docs/creating-stored-infotypes
+        to learn more.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]):
+                The request object. Request message for
+                ListStoredInfoTypes.
+            parent (str):
+                Required. Parent resource name, e.g.
+                ``projects/PROJECT_ID`` or
+                ``projects/PROJECT_ID/locations/LOCATION_ID``, such as
+                ``projects/example-project/locations/europe-west3``.
+                Corresponds to the ``parent`` field on ``request``; may
+                not be combined with an explicit ``request``.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager:
+                Response message for ListStoredInfoTypes. Iterating over
+                this object will yield results and resolve additional
+                pages automatically.
+        """
+        # Flattened keyword arguments may not be combined with an explicit
+        # request object; reject the ambiguous call up front.
+        if request is not None and any([parent]):
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Coerce into the proto-plus request type, then apply the
+        # flattened field.
+        if not isinstance(request, dlp.ListStoredInfoTypesRequest):
+            request = dlp.ListStoredInfoTypesRequest(request)
+        if parent is not None:
+            request.parent = parent
+
+        # The transport pre-wraps each method with retry/timeout defaults.
+        rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types]
+
+        # Route the call by parent resource via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),))
+        metadata = tuple(metadata) + (header,)
+
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+        # Wrap the first page in a pager that lazily fetches the rest.
+        return pagers.ListStoredInfoTypesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+    def delete_stored_info_type(self,
+            request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a stored infoType.
+
+        See https://cloud.google.com/dlp/docs/creating-stored-infotypes
+        to learn more.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]):
+                The request object. Request message for
+                DeleteStoredInfoType.
+            name (str):
+                Required. Resource name of the organization and
+                storedInfoType to be deleted, for example
+                ``organizations/433245324/storedInfoTypes/432452342`` or
+                projects/project-id/storedInfoTypes/432452342.
+                Corresponds to the ``name`` field on ``request``; may not
+                be combined with an explicit ``request``.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Flattened keyword arguments may not be combined with an explicit
+        # request object; reject the ambiguous call up front.
+        if request is not None and any([name]):
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Coerce into the proto-plus request type, then apply the
+        # flattened field.
+        if not isinstance(request, dlp.DeleteStoredInfoTypeRequest):
+            request = dlp.DeleteStoredInfoTypeRequest(request)
+        if name is not None:
+            request.name = name
+
+        # The transport pre-wraps each method with retry/timeout defaults.
+        rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type]
+
+        # Route the call by resource name via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        metadata = tuple(metadata) + (header,)
+
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def hybrid_inspect_dlp_job(self,
+            request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.HybridInspectResponse:
+        r"""Inspect hybrid content and store findings to a job.
+
+        To review the findings, inspect the job. Inspection will occur
+        asynchronously.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]):
+                The request object. Request to search for potentially
+                sensitive info in a custom location.
+            name (str):
+                Required. Resource name of the job to execute a hybrid
+                inspect on, for example
+                ``projects/dlp-test-project/dlpJob/53234423``.
+                Corresponds to the ``name`` field on ``request``; may not
+                be combined with an explicit ``request``.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.HybridInspectResponse:
+                Quota exceeded errors will be thrown once quota has been
+                met.
+        """
+        # Flattened keyword arguments may not be combined with an explicit
+        # request object; reject the ambiguous call up front.
+        if request is not None and any([name]):
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Coerce into the proto-plus request type, then apply the
+        # flattened field.
+        if not isinstance(request, dlp.HybridInspectDlpJobRequest):
+            request = dlp.HybridInspectDlpJobRequest(request)
+        if name is not None:
+            request.name = name
+
+        # The transport pre-wraps each method with retry/timeout defaults.
+        rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job]
+
+        # Route the call by resource name via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        metadata = tuple(metadata) + (header,)
+
+        return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def finish_dlp_job(self,
+            request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Finish a running hybrid DlpJob.
+
+        Triggers the finalization steps and running of any enabled
+        actions that have not yet run.
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]):
+                The request object. The request message for finishing a
+                DLP hybrid job.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # This RPC exposes no flattened fields; only coerce the request.
+        if not isinstance(request, dlp.FinishDlpJobRequest):
+            request = dlp.FinishDlpJobRequest(request)
+
+        # The transport pre-wraps each method with retry/timeout defaults.
+        rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job]
+
+        # Route the call by resource name via the request-params header.
+        header = gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        metadata = tuple(metadata) + (header,)
+
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def __enter__(self) -> "DlpServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the
+            transport and may cause errors in other clients!
+        """
+        self.transport.close()
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "DlpServiceClient",
+)
diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py
new file mode 100644
index 00000000..73a0e48f
--- /dev/null
+++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py
@@ -0,0 +1,623 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator
+
+from google.cloud.dlp_v2.types import dlp
+
+
+class ListInspectTemplatesPager:
+    """A pager for iterating through ``list_inspect_templates`` requests.
+
+    Thinly wraps an initial
+    :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` and
+    exposes ``__iter__`` over its ``inspect_templates`` field, issuing
+    further ``ListInspectTemplates`` requests transparently as pages are
+    exhausted.
+
+    All the usual
+    :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse`
+    attributes are available on the pager; when multiple requests are
+    made, only the most recent response is retained for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., dlp.ListInspectTemplatesResponse],
+            request: dlp.ListInspectTemplatesRequest,
+            response: dlp.ListInspectTemplatesResponse,
+            *,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest):
+                The initial request object.
+            response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        # Copy the request so page_token mutations don't leak to the caller.
+        self._request = dlp.ListInspectTemplatesRequest(request)
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        # Delegate unknown attributes to the most recent response.
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[dlp.ListInspectTemplatesResponse]:
+        # Yield the page in hand, then fetch successors while a
+        # continuation token is present.
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[dlp.InspectTemplate]:
+        return (template
+                for page in self.pages
+                for template in page.inspect_templates)
+
+    def __repr__(self) -> str:
+        return f'{self.__class__.__name__}<{self._response!r}>'
+
+
+class ListInspectTemplatesAsyncPager:
+    """A pager for iterating through ``list_inspect_templates`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``inspect_templates`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListInspectTemplates`` requests and continue to iterate
+    through the ``inspect_templates`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]],
+            request: dlp.ListInspectTemplatesRequest,
+            response: dlp.ListInspectTemplatesResponse,
+            *,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest):
+                The initial request object.
+            response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.InspectTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.inspect_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListDeidentifyTemplatesResponse], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DeidentifyTemplate]: + for page in self.pages: + yield from page.deidentify_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesAsyncPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.DeidentifyTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.deidentify_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_triggers`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListJobTriggersResponse], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.JobTrigger]: + for page in self.pages: + yield from page.job_triggers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersAsyncPager: + """A pager for iterating through ``list_job_triggers`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_triggers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.JobTrigger]: + async def async_generator(): + async for page in self.pages: + for response in page.job_triggers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListDlpJobsResponse], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. 
+ response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DlpJob]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsAsyncPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.DlpJob]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.ListStoredInfoTypesResponse], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.StoredInfoType]: + for page in self.pages: + yield from page.stored_info_types + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesAsyncPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.StoredInfoType]: + async def async_generator(): + async for page in self.pages: + for response in page.stored_info_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py new file mode 100644 index 00000000..df9b4279 --- /dev/null 
+++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DlpServiceTransport +from .grpc import DlpServiceGrpcTransport +from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .rest import DlpServiceRestTransport +from .rest import DlpServiceRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] +_transport_registry['grpc'] = DlpServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport +_transport_registry['rest'] = DlpServiceRestTransport + +__all__ = ( + 'DlpServiceTransport', + 'DlpServiceGrpcTransport', + 'DlpServiceGrpcAsyncIOTransport', + 'DlpServiceRestTransport', + 'DlpServiceRestInterceptor', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py new file mode 100644 index 00000000..290f4cdf --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -0,0 +1,751 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dlp_v2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DlpServiceTransport(abc.ABC): + """Abstract transport class for DlpService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dlp.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.inspect_content: gapic_v1.method.wrap_method( + self.inspect_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.redact_image: gapic_v1.method.wrap_method( + self.redact_image, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.deidentify_content: gapic_v1.method.wrap_method( + self.deidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.reidentify_content: gapic_v1.method.wrap_method( + self.reidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_info_types: gapic_v1.method.wrap_method( + self.list_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_inspect_template: gapic_v1.method.wrap_method( + self.create_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_inspect_template: gapic_v1.method.wrap_method( + 
self.update_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_inspect_template: gapic_v1.method.wrap_method( + self.get_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_inspect_templates: gapic_v1.method.wrap_method( + self.list_inspect_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_inspect_template: gapic_v1.method.wrap_method( + self.delete_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_deidentify_template: gapic_v1.method.wrap_method( + self.create_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_deidentify_template: gapic_v1.method.wrap_method( + self.update_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_deidentify_template: gapic_v1.method.wrap_method( + self.get_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_deidentify_templates: gapic_v1.method.wrap_method( + self.list_deidentify_templates, + default_retry=retries.Retry( 
+initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_deidentify_template: gapic_v1.method.wrap_method( + self.delete_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_job_trigger: gapic_v1.method.wrap_method( + self.create_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.update_job_trigger: gapic_v1.method.wrap_method( + self.update_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( + self.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.get_job_trigger: gapic_v1.method.wrap_method( + self.get_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_job_triggers: gapic_v1.method.wrap_method( + self.list_job_triggers, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_job_trigger: gapic_v1.method.wrap_method( + self.delete_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.activate_job_trigger: gapic_v1.method.wrap_method( + self.activate_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.create_dlp_job: gapic_v1.method.wrap_method( + self.create_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.list_dlp_jobs: gapic_v1.method.wrap_method( + self.list_dlp_jobs, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_dlp_job: gapic_v1.method.wrap_method( + self.get_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_dlp_job: gapic_v1.method.wrap_method( + self.delete_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.cancel_dlp_job: gapic_v1.method.wrap_method( + self.cancel_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.create_stored_info_type: gapic_v1.method.wrap_method( + self.create_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.update_stored_info_type: gapic_v1.method.wrap_method( + self.update_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.get_stored_info_type: gapic_v1.method.wrap_method( + self.get_stored_info_type, + default_retry=retries.Retry( 
+initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_stored_info_types: gapic_v1.method.wrap_method( + self.list_stored_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_stored_info_type: gapic_v1.method.wrap_method( + self.delete_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( + self.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.finish_dlp_job: gapic_v1.method.wrap_method( + self.finish_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Union[ + dlp.InspectContentResponse, + Awaitable[dlp.InspectContentResponse] + ]]: + raise NotImplementedError() + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + Union[ + dlp.RedactImageResponse, + Awaitable[dlp.RedactImageResponse] + ]]: + raise NotImplementedError() + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + Union[ + dlp.DeidentifyContentResponse, + Awaitable[dlp.DeidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Union[ + dlp.ReidentifyContentResponse, + Awaitable[dlp.ReidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Union[ + dlp.ListInfoTypesResponse, + Awaitable[dlp.ListInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Union[ + dlp.ListInspectTemplatesResponse, + Awaitable[dlp.ListInspectTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Union[ + 
empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Union[ + dlp.ListDeidentifyTemplatesResponse, + Awaitable[dlp.ListDeidentifyTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def list_job_triggers(self) -> Callable[ + 
[dlp.ListJobTriggersRequest], + Union[ + dlp.ListJobTriggersResponse, + Awaitable[dlp.ListJobTriggersResponse] + ]]: + raise NotImplementedError() + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + Union[ + dlp.ListDlpJobsResponse, + Awaitable[dlp.ListDlpJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def 
list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Union[ + dlp.ListStoredInfoTypesResponse, + Awaitable[dlp.ListStoredInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DlpServiceTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py new file mode 100644 index 00000000..81be6a63 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -0,0 +1,1261 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO + + +class DlpServiceGrpcTransport(DlpServiceTransport): + """gRPC backend transport for DlpService. + + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + dlp.InspectContentResponse]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. 
+ For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + Returns: + Callable[[~.InspectContentRequest], + ~.InspectContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'inspect_content' not in self._stubs: + self._stubs['inspect_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/InspectContent', + request_serializer=dlp.InspectContentRequest.serialize, + response_deserializer=dlp.InspectContentResponse.deserialize, + ) + return self._stubs['inspect_content'] + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + dlp.RedactImageResponse]: + r"""Return a callable for the redact image method over gRPC. + + Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.RedactImageRequest], + ~.RedactImageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'redact_image' not in self._stubs: + self._stubs['redact_image'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/RedactImage', + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs['redact_image'] + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + dlp.DeidentifyContentResponse]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.DeidentifyContentRequest], + ~.DeidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + dlp.ReidentifyContentResponse]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. 
See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + ~.ReidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + dlp.ListInfoTypesResponse]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + ~.ListInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + dlp.ListInspectTemplatesResponse]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + ~.ListInspectTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. 
+ + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + dlp.ListDeidentifyTemplatesResponse]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + ~.ListDeidentifyTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. 
+ + Returns: + Callable[[~.CreateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.UpdateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. 
To review the findings, monitor the jobs
+        within the trigger.
+
+        Returns:
+            Callable[[~.HybridInspectJobTriggerRequest],
+                    ~.HybridInspectResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'hybrid_inspect_job_trigger' not in self._stubs:
+            self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary(
+                '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger',
+                request_serializer=dlp.HybridInspectJobTriggerRequest.serialize,
+                response_deserializer=dlp.HybridInspectResponse.deserialize,
+            )
+        return self._stubs['hybrid_inspect_job_trigger']
+
+    @property
+    def get_job_trigger(self) -> Callable[
+            [dlp.GetJobTriggerRequest],
+            dlp.JobTrigger]:
+        r"""Return a callable for the get job trigger method over gRPC.
+
+        Gets a job trigger.
+        See
+        https://cloud.google.com/dlp/docs/creating-job-triggers
+        to learn more.
+
+        Returns:
+            Callable[[~.GetJobTriggerRequest],
+                    ~.JobTrigger]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_job_trigger' not in self._stubs:
+            self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary(
+                '/google.privacy.dlp.v2.DlpService/GetJobTrigger',
+                request_serializer=dlp.GetJobTriggerRequest.serialize,
+                response_deserializer=dlp.JobTrigger.deserialize,
+            )
+        return self._stubs['get_job_trigger']
+
+    @property
+    def list_job_triggers(self) -> Callable[
+            [dlp.ListJobTriggersRequest],
+            dlp.ListJobTriggersResponse]:
+        r"""Return a callable for the list job triggers method over gRPC.
+
+        Lists job triggers.
+ See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + ~.ListJobTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_triggers' not in self._stubs: + self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListJobTriggers', + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs['list_job_triggers'] + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_trigger' not in self._stubs: + self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_trigger'] + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + dlp.DlpJob]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. 
Causes the immediate execution
+        of a trigger instead of waiting on the trigger event to
+        occur.
+
+        Returns:
+            Callable[[~.ActivateJobTriggerRequest],
+                    ~.DlpJob]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'activate_job_trigger' not in self._stubs:
+            self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary(
+                '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger',
+                request_serializer=dlp.ActivateJobTriggerRequest.serialize,
+                response_deserializer=dlp.DlpJob.deserialize,
+            )
+        return self._stubs['activate_job_trigger']
+
+    @property
+    def create_dlp_job(self) -> Callable[
+            [dlp.CreateDlpJobRequest],
+            dlp.DlpJob]:
+        r"""Return a callable for the create dlp job method over gRPC.
+
+        Creates a new job to inspect storage or calculate
+        risk metrics. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+        When no InfoTypes or CustomInfoTypes are specified in
+        inspect jobs, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.CreateDlpJobRequest],
+                    ~.DlpJob]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if 'create_dlp_job' not in self._stubs: + self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDlpJob', + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['create_dlp_job'] + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + dlp.ListDlpJobsResponse]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + ~.ListDlpJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_dlp_jobs' not in self._stubs: + self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDlpJobs', + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs['list_dlp_jobs'] + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + dlp.DlpJob]: + r"""Return a callable for the get dlp job method over gRPC. + + Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. 
+ + Returns: + Callable[[~.CancelDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. 
The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + dlp.ListStoredInfoTypesResponse]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + ~.ListStoredInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DlpServiceGrpcTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..be0411f7 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -0,0 +1,1260 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DlpServiceGrpcTransport + + +class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): + """gRPC AsyncIO backend transport for DlpService. + + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dlp.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[aio.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests.
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Awaitable[dlp.InspectContentResponse]]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. 
+ This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + For how-to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text. + + Returns: + Callable[[~.InspectContentRequest], + Awaitable[~.InspectContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'inspect_content' not in self._stubs: + self._stubs['inspect_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/InspectContent', + request_serializer=dlp.InspectContentRequest.serialize, + response_deserializer=dlp.InspectContentResponse.deserialize, + ) + return self._stubs['inspect_content'] + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + Awaitable[dlp.RedactImageResponse]]: + r"""Return a callable for the redact image method over gRPC. + + Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.RedactImageRequest], + Awaitable[~.RedactImageResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'redact_image' not in self._stubs: + self._stubs['redact_image'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/RedactImage', + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs['redact_image'] + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + Awaitable[dlp.DeidentifyContentResponse]]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.DeidentifyContentRequest], + Awaitable[~.DeidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Awaitable[dlp.ReidentifyContentResponse]]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + Awaitable[~.ReidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Awaitable[dlp.ListInfoTypesResponse]]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + Awaitable[~.ListInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. 
+ + Returns: + Callable[[~.UpdateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Awaitable[dlp.ListInspectTemplatesResponse]]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. 
+ See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + Awaitable[~.ListInspectTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. 
+ + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Awaitable[dlp.ListDeidentifyTemplatesResponse]]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + Awaitable[~.ListDeidentifyTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.CreateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. 
+ + Returns: + Callable[[~.UpdateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'hybrid_inspect_job_trigger' not in self._stubs: + self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_job_trigger'] + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_trigger' not in self._stubs: + self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetJobTrigger', + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['get_job_trigger'] + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + Awaitable[dlp.ListJobTriggersResponse]]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + Awaitable[~.ListJobTriggersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_triggers' not in self._stubs: + self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListJobTriggers', + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs['list_job_triggers'] + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_trigger' not in self._stubs: + self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_trigger'] + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. Causes the immediate execution + of a trigger instead of waiting on the trigger event to + occur. + + Returns: + Callable[[~.ActivateJobTriggerRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'activate_job_trigger' not in self._stubs: + self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', + request_serializer=dlp.ActivateJobTriggerRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['activate_job_trigger'] + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the create dlp job method over gRPC. + + Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.CreateDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_dlp_job' not in self._stubs: + self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDlpJob', + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['create_dlp_job'] + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + Awaitable[dlp.ListDlpJobsResponse]]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. 
See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + Awaitable[~.ListDlpJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_dlp_jobs' not in self._stubs: + self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDlpJobs', + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs['list_dlp_jobs'] + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the get dlp job method over gRPC. + + Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete dlp job method over gRPC. 
+ + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Awaitable[dlp.ListStoredInfoTypesResponse]]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. 
+ + Returns: + Callable[[~.ListStoredInfoTypesRequest], + Awaitable[~.ListStoredInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. 
+ + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'DlpServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py new file mode 100644 index 00000000..fffd577e --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -0,0 +1,4324 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore + +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DlpServiceRestInterceptor: + """Interceptor for DlpService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DlpServiceRestTransport. + + .. 
code-block:: python + class MyCustomDlpServiceInterceptor(DlpServiceRestInterceptor): + def pre_activate_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_activate_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_cancel_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_create_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_deidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_delete_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_finish_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return 
response + + def pre_hybrid_inspect_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_hybrid_inspect_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_inspect_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_inspect_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_deidentify_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deidentify_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_dlp_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_dlp_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_inspect_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_inspect_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_job_triggers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_job_triggers(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_list_stored_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_stored_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_redact_image(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_redact_image(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DlpServiceRestTransport(interceptor=MyCustomDlpServiceInterceptor()) + client = DlpServiceClient(transport=transport) + + + """ + def pre_activate_job_trigger(self, request: dlp.ActivateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for activate_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for activate_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_cancel_dlp_job(self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_create_deidentify_template(self, request: dlp.CreateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for create_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_create_dlp_job(self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for create_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_create_inspect_template(self, request: dlp.CreateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for create_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_create_job_trigger(self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for create_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_create_stored_info_type(self, request: dlp.CreateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for create_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_deidentify_content(self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for deidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_deidentify_content(self, response: dlp.DeidentifyContentResponse) -> dlp.DeidentifyContentResponse: + """Post-rpc interceptor for deidentify_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_delete_deidentify_template(self, request: dlp.DeleteDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_delete_dlp_job(self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_inspect_template(self, request: dlp.DeleteInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_job_trigger(self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_stored_info_type(self, request: dlp.DeleteStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_finish_dlp_job(self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for finish_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_get_deidentify_template(self, request: dlp.GetDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for get_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_dlp_job(self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for get_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_inspect_template(self, request: dlp.GetInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_get_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for get_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_job_trigger(self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for get_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_stored_info_type(self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for get_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_hybrid_inspect_dlp_job(self, request: dlp.HybridInspectDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_hybrid_inspect_dlp_job(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_hybrid_inspect_job_trigger(self, request: dlp.HybridInspectJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_hybrid_inspect_job_trigger(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_inspect_content(self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for inspect_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_inspect_content(self, response: dlp.InspectContentResponse) -> dlp.InspectContentResponse: + """Post-rpc interceptor for inspect_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_deidentify_templates(self, request: dlp.ListDeidentifyTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deidentify_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_deidentify_templates(self, response: dlp.ListDeidentifyTemplatesResponse) -> dlp.ListDeidentifyTemplatesResponse: + """Post-rpc interceptor for list_deidentify_templates + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_dlp_jobs(self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_dlp_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_dlp_jobs(self, response: dlp.ListDlpJobsResponse) -> dlp.ListDlpJobsResponse: + """Post-rpc interceptor for list_dlp_jobs + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_list_info_types(self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_info_types(self, response: dlp.ListInfoTypesResponse) -> dlp.ListInfoTypesResponse: + """Post-rpc interceptor for list_info_types + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_inspect_templates(self, request: dlp.ListInspectTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_inspect_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_inspect_templates(self, response: dlp.ListInspectTemplatesResponse) -> dlp.ListInspectTemplatesResponse: + """Post-rpc interceptor for list_inspect_templates + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_job_triggers(self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_job_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_list_job_triggers(self, response: dlp.ListJobTriggersResponse) -> dlp.ListJobTriggersResponse: + """Post-rpc interceptor for list_job_triggers + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_stored_info_types(self, request: dlp.ListStoredInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_stored_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_stored_info_types(self, response: dlp.ListStoredInfoTypesResponse) -> dlp.ListStoredInfoTypesResponse: + """Post-rpc interceptor for list_stored_info_types + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_redact_image(self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for redact_image + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_redact_image(self, response: dlp.RedactImageResponse) -> dlp.RedactImageResponse: + """Post-rpc interceptor for redact_image + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_reidentify_content(self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_reidentify_content(self, response: dlp.ReidentifyContentResponse) -> dlp.ReidentifyContentResponse: + """Post-rpc interceptor for reidentify_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_deidentify_template(self, request: dlp.UpdateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for update_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_inspect_template(self, request: dlp.UpdateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_update_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for update_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_job_trigger(self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for update_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_stored_info_type(self, request: dlp.UpdateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for update_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DlpServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DlpServiceRestInterceptor + + +class DlpServiceRestTransport(DlpServiceTransport): + """REST backend transport for DlpService. 
+ + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[DlpServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST)
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or DlpServiceRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _ActivateJobTrigger(DlpServiceRestStub):
+ def __hash__(self):
+ return hash("ActivateJobTrigger")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls,
message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ActivateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DlpJob: + r"""Call the activate job trigger method over HTTP. + + Args: + request (~.dlp.ActivateJobTriggerRequest): + The request object. Request message for + ActivateJobTrigger. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/jobTriggers/*}:activate', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:activate', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_activate_job_trigger(request, metadata) + pb_request = dlp.ActivateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = 
getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_activate_job_trigger(resp) + return resp + + class _CancelDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("CancelDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CancelDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the cancel dlp job method over HTTP. + + Args: + request (~.dlp.CancelDlpJobRequest): + The request object. The request message for canceling a + DLP job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/dlpJobs/*}:cancel', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_cancel_dlp_job(request, metadata) + pb_request = dlp.CancelDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CreateDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("CreateDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the create deidentify + template method over HTTP. + + Args: + request (~.dlp.CreateDeidentifyTemplateRequest): + The request object. Request message for + CreateDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_deidentify_template(request, metadata) + pb_request = dlp.CreateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deidentify_template(resp) + return resp + + class _CreateDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("CreateDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DlpJob: + r"""Call the create dlp job method over HTTP. + + Args: + request (~.dlp.CreateDlpJobRequest): + The request object. Request message for + CreateDlpJobRequest. Used to initiate + long running jobs such as calculating + risk metrics or inspecting Google Cloud + Storage. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/dlpJobs', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_dlp_job(request, metadata) + pb_request = dlp.CreateDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_dlp_job(resp) + return resp + + class _CreateInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("CreateInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectTemplate: + r"""Call the create inspect template method over HTTP. + + Args: + request (~.dlp.CreateInspectTemplateRequest): + The request object. Request message for + CreateInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/inspectTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/inspectTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_inspect_template(request, metadata) + pb_request = dlp.CreateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_inspect_template(resp) + return resp + + class _CreateJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("CreateJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.JobTrigger: + r"""Call the create job trigger method over HTTP. + + Args: + request (~.dlp.CreateJobTriggerRequest): + The request object. Request message for CreateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/jobTriggers', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_job_trigger(request, metadata) + pb_request = dlp.CreateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job_trigger(resp) + return resp + + class _CreateStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("CreateStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.StoredInfoType: + r"""Call the create stored info type method over HTTP. + + Args: + request (~.dlp.CreateStoredInfoTypeRequest): + The request object. Request message for + CreateStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/storedInfoTypes', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_stored_info_type(request, metadata) + pb_request = dlp.CreateStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_stored_info_type(resp) + return resp + + class _DeidentifyContent(DlpServiceRestStub): + def __hash__(self): + return hash("DeidentifyContent") + + def __call__(self, + request: dlp.DeidentifyContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyContentResponse: + r"""Call the deidentify content method over HTTP. + + Args: + request (~.dlp.DeidentifyContentRequest): + The request object. Request to de-identify a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:deidentify', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:deidentify', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_deidentify_content(request, metadata) + pb_request = dlp.DeidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyContentResponse() + pb_resp = dlp.DeidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_deidentify_content(resp) + return resp + + class _DeleteDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete deidentify + template method over HTTP. + + Args: + request (~.dlp.DeleteDeidentifyTemplateRequest): + The request object. Request message for + DeleteDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_deidentify_template(request, metadata) + pb_request = dlp.DeleteDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete dlp job method over HTTP. + + Args: + request (~.dlp.DeleteDlpJobRequest): + The request object. The request message for deleting a + DLP job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/dlpJobs/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_dlp_job(request, metadata) + pb_request = dlp.DeleteDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete inspect template method over HTTP. + + Args: + request (~.dlp.DeleteInspectTemplateRequest): + The request object. 
Request message for + DeleteInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_inspect_template(request, metadata) + pb_request = dlp.DeleteInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete job trigger method over HTTP. + + Args: + request (~.dlp.DeleteJobTriggerRequest): + The request object. Request message for DeleteJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_job_trigger(request, metadata) + pb_request = dlp.DeleteJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete stored info type method over HTTP. + + Args: + request (~.dlp.DeleteStoredInfoTypeRequest): + The request object. Request message for + DeleteStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_stored_info_type(request, metadata) + pb_request = dlp.DeleteStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _FinishDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("FinishDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.FinishDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the finish dlp job method over HTTP. + + Args: + request (~.dlp.FinishDlpJobRequest): + The request object. The request message for finishing a + DLP hybrid job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:finish', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_finish_dlp_job(request, metadata) + pb_request = dlp.FinishDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("GetDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the get deidentify template method over HTTP. + + Args: + request (~.dlp.GetDeidentifyTemplateRequest): + The request object. Request message for + GetDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_get_deidentify_template(request, metadata) + pb_request = dlp.GetDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deidentify_template(resp) + return resp + + class _GetDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("GetDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DlpJob: + r"""Call the get dlp job method over HTTP. + + Args: + request (~.dlp.GetDlpJobRequest): + The request object. The request message for [DlpJobs.GetDlpJob][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/dlpJobs/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', + }, + ] + request, metadata = self._interceptor.pre_get_dlp_job(request, metadata) + pb_request = dlp.GetDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_dlp_job(resp) + return resp + + class _GetInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("GetInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectTemplate: + r"""Call the get inspect template method over HTTP. + + Args: + request (~.dlp.GetInspectTemplateRequest): + The request object. Request message for + GetInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_get_inspect_template(request, metadata) + pb_request = dlp.GetInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_inspect_template(resp) + return resp + + class _GetJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("GetJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.JobTrigger: + r"""Call the get job trigger method over HTTP. + + Args: + request (~.dlp.GetJobTriggerRequest): + The request object. Request message for GetJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_job_trigger(request, metadata) + pb_request = dlp.GetJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_trigger(resp) + return resp + + class _GetStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("GetStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.StoredInfoType: + r"""Call the get stored info type method over HTTP. + + Args: + request (~.dlp.GetStoredInfoTypeRequest): + The request object. Request message for + GetStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + }, + ] + request, metadata = self._interceptor.pre_get_stored_info_type(request, metadata) + pb_request = dlp.GetStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stored_info_type(resp) + return resp + + class _HybridInspectDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("HybridInspectDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.HybridInspectDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect dlp job method over HTTP. + + Args: + request (~.dlp.HybridInspectDlpJobRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job(request, metadata) + pb_request = dlp.HybridInspectDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_hybrid_inspect_dlp_job(resp) + return resp + + class _HybridInspectJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("HybridInspectJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.HybridInspectJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect job + trigger method over HTTP. + + Args: + request (~.dlp.HybridInspectJobTriggerRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger(request, metadata) + pb_request = dlp.HybridInspectJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_hybrid_inspect_job_trigger(resp) + return resp + + class _InspectContent(DlpServiceRestStub): + def __hash__(self): + return hash("InspectContent") + + def __call__(self, + request: dlp.InspectContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectContentResponse: + r"""Call the inspect content method over HTTP. + + Args: + request (~.dlp.InspectContentRequest): + The request object. Request to search for potentially + sensitive info in a ContentItem. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectContentResponse: + Results of inspecting an item. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:inspect', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:inspect', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_inspect_content(request, metadata) + pb_request = dlp.InspectContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectContentResponse() + pb_resp = dlp.InspectContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_inspect_content(resp) + return resp + + class _ListDeidentifyTemplates(DlpServiceRestStub): + def __hash__(self): + return hash("ListDeidentifyTemplates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListDeidentifyTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListDeidentifyTemplatesResponse: + r"""Call the list deidentify templates method over HTTP. + + Args: + request (~.dlp.ListDeidentifyTemplatesRequest): + The request object. Request message for + ListDeidentifyTemplates. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListDeidentifyTemplatesResponse: + Response message for + ListDeidentifyTemplates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', + }, + ] + request, metadata = self._interceptor.pre_list_deidentify_templates(request, metadata) + pb_request = dlp.ListDeidentifyTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDeidentifyTemplatesResponse() + pb_resp = dlp.ListDeidentifyTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deidentify_templates(resp) + return resp + + class _ListDlpJobs(DlpServiceRestStub): + def __hash__(self): + return hash("ListDlpJobs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListDlpJobsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListDlpJobsResponse: + r"""Call the list dlp jobs method over HTTP. + + Args: + request (~.dlp.ListDlpJobsRequest): + The request object. The request message for listing DLP + jobs. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListDlpJobsResponse: + The response message for listing DLP + jobs. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/dlpJobs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/dlpJobs', + }, + ] + request, metadata = self._interceptor.pre_list_dlp_jobs(request, metadata) + pb_request = dlp.ListDlpJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDlpJobsResponse() + pb_resp = dlp.ListDlpJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_dlp_jobs(resp) + return resp + + class _ListInfoTypes(DlpServiceRestStub): + def __hash__(self): + return hash("ListInfoTypes") + + def __call__(self, + request: dlp.ListInfoTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListInfoTypesResponse: + r"""Call the list info types method over HTTP. + + Args: + request (~.dlp.ListInfoTypesRequest): + The request object. Request for the list of infoTypes. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInfoTypesResponse: + Response to the ListInfoTypes + request. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/infoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=locations/*}/infoTypes', + }, + ] + request, metadata = self._interceptor.pre_list_info_types(request, metadata) + pb_request = dlp.ListInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInfoTypesResponse() + pb_resp = dlp.ListInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_info_types(resp) + return resp + + class _ListInspectTemplates(DlpServiceRestStub): + def __hash__(self): + return hash("ListInspectTemplates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListInspectTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListInspectTemplatesResponse: + r"""Call the list inspect templates method over HTTP. + + Args: + request (~.dlp.ListInspectTemplatesRequest): + The request object. Request message for + ListInspectTemplates. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInspectTemplatesResponse: + Response message for + ListInspectTemplates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/inspectTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/inspectTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', + }, + ] + request, metadata = self._interceptor.pre_list_inspect_templates(request, metadata) + pb_request = dlp.ListInspectTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInspectTemplatesResponse() + pb_resp = dlp.ListInspectTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_inspect_templates(resp) + return resp + + class _ListJobTriggers(DlpServiceRestStub): + def __hash__(self): + return hash("ListJobTriggers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListJobTriggersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListJobTriggersResponse: + r"""Call the list job triggers method over HTTP. + + Args: + request (~.dlp.ListJobTriggersRequest): + The request object. Request message for ListJobTriggers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListJobTriggersResponse: + Response message for ListJobTriggers. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/jobTriggers', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', + }, + ] + request, metadata = self._interceptor.pre_list_job_triggers(request, metadata) + pb_request = dlp.ListJobTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListJobTriggersResponse() + pb_resp = dlp.ListJobTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_job_triggers(resp) + return resp + + class _ListStoredInfoTypes(DlpServiceRestStub): + def __hash__(self): + return hash("ListStoredInfoTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListStoredInfoTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListStoredInfoTypesResponse: + r"""Call the list stored info types method over HTTP. + + Args: + request (~.dlp.ListStoredInfoTypesRequest): + The request object. Request message for + ListStoredInfoTypes. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListStoredInfoTypesResponse: + Response message for + ListStoredInfoTypes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/storedInfoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', + }, + ] + request, metadata = self._interceptor.pre_list_stored_info_types(request, metadata) + pb_request = dlp.ListStoredInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListStoredInfoTypesResponse() + pb_resp = dlp.ListStoredInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_stored_info_types(resp) + return resp + + class _RedactImage(DlpServiceRestStub): + def __hash__(self): + return hash("RedactImage") + + def __call__(self, + request: dlp.RedactImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.RedactImageResponse: + r"""Call the redact image method over HTTP. + + Args: + request (~.dlp.RedactImageRequest): + The request object. Request to search for potentially + sensitive info in an image and redact it + by covering it with a colored rectangle. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.RedactImageResponse: + Results of redacting an image. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/image:redact', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/image:redact', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_redact_image(request, metadata) + pb_request = dlp.RedactImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.RedactImageResponse() + pb_resp = dlp.RedactImageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_redact_image(resp) + return resp + + class _ReidentifyContent(DlpServiceRestStub): + def __hash__(self): + return hash("ReidentifyContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ReidentifyContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ReidentifyContentResponse: + r"""Call the reidentify content method over HTTP. + + Args: + request (~.dlp.ReidentifyContentRequest): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ReidentifyContentResponse: + Results of re-identifying an item. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:reidentify', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:reidentify', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_reidentify_content(request, metadata) + pb_request = dlp.ReidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ReidentifyContentResponse() + pb_resp = dlp.ReidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reidentify_content(resp) + return resp + + class _UpdateDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the update deidentify + template method over HTTP. + + Args: + request (~.dlp.UpdateDeidentifyTemplateRequest): + The request object. Request message for + UpdateDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_deidentify_template(request, metadata) + pb_request = dlp.UpdateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deidentify_template(resp) + return resp + + class _UpdateInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectTemplate: + r"""Call the update inspect template method over HTTP. + + Args: + request (~.dlp.UpdateInspectTemplateRequest): + The request object. Request message for + UpdateInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_inspect_template(request, metadata) + pb_request = dlp.UpdateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_inspect_template(resp) + return resp + + class _UpdateJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.JobTrigger: + r"""Call the update job trigger method over HTTP. + + Args: + request (~.dlp.UpdateJobTriggerRequest): + The request object. Request message for UpdateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_job_trigger(request, metadata) + pb_request = dlp.UpdateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job_trigger(resp) + return resp + + class _UpdateStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.StoredInfoType: + r"""Call the update stored info type method over HTTP. + + Args: + request (~.dlp.UpdateStoredInfoTypeRequest): + The request object. Request message for + UpdateStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_stored_info_type(request, metadata) + pb_request = dlp.UpdateStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_stored_info_type(resp) + return resp + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + dlp.DeidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + dlp.InspectContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + dlp.ListDeidentifyTemplatesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + dlp.ListDlpJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + dlp.ListInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + dlp.ListInspectTemplatesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + dlp.ListJobTriggersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + dlp.ListStoredInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + dlp.RedactImageResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + dlp.ReidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'DlpServiceRestTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py new file mode 100644 index 00000000..5bc3d949 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .dlp import ( + Action, + ActivateJobTriggerRequest, + AnalyzeDataSourceRiskDetails, + BoundingBox, + BucketingConfig, + ByteContentItem, + CancelDlpJobRequest, + CharacterMaskConfig, + CharsToIgnore, + Color, + Container, + ContentItem, + ContentLocation, + CreateDeidentifyTemplateRequest, + CreateDlpJobRequest, + CreateInspectTemplateRequest, + CreateJobTriggerRequest, + CreateStoredInfoTypeRequest, + CryptoDeterministicConfig, + CryptoHashConfig, + CryptoKey, + CryptoReplaceFfxFpeConfig, + DataProfileAction, + DataProfileConfigSnapshot, + DataProfileJobConfig, + DataProfileLocation, + DataProfilePubSubCondition, + DataProfilePubSubMessage, + DataRiskLevel, + DateShiftConfig, + DateTime, + DeidentifyConfig, + DeidentifyContentRequest, + DeidentifyContentResponse, + DeidentifyTemplate, + DeleteDeidentifyTemplateRequest, + DeleteDlpJobRequest, + DeleteInspectTemplateRequest, + DeleteJobTriggerRequest, + DeleteStoredInfoTypeRequest, + DlpJob, + DocumentLocation, + Error, + ExcludeByHotword, + ExcludeInfoTypes, + ExclusionRule, + FieldTransformation, + Finding, + FinishDlpJobRequest, + FixedSizeBucketingConfig, + GetDeidentifyTemplateRequest, + GetDlpJobRequest, + GetInspectTemplateRequest, + GetJobTriggerRequest, + GetStoredInfoTypeRequest, + HybridContentItem, + HybridFindingDetails, + HybridInspectDlpJobRequest, + HybridInspectJobTriggerRequest, + HybridInspectResponse, + HybridInspectStatistics, + ImageLocation, + ImageTransformations, + InfoTypeCategory, + InfoTypeDescription, + InfoTypeStats, + InfoTypeSummary, + InfoTypeTransformations, + InspectConfig, + InspectContentRequest, + InspectContentResponse, + InspectDataSourceDetails, + InspectionRule, + InspectionRuleSet, + InspectJobConfig, + InspectResult, + InspectTemplate, + JobTrigger, + KmsWrappedCryptoKey, + LargeCustomDictionaryConfig, + LargeCustomDictionaryStats, + ListDeidentifyTemplatesRequest, + ListDeidentifyTemplatesResponse, + ListDlpJobsRequest, + ListDlpJobsResponse, + 
ListInfoTypesRequest, + ListInfoTypesResponse, + ListInspectTemplatesRequest, + ListInspectTemplatesResponse, + ListJobTriggersRequest, + ListJobTriggersResponse, + ListStoredInfoTypesRequest, + ListStoredInfoTypesResponse, + Location, + Manual, + MetadataLocation, + OtherInfoTypeSummary, + OutputStorageConfig, + PrimitiveTransformation, + PrivacyMetric, + ProfileStatus, + QuasiId, + QuoteInfo, + Range, + RecordCondition, + RecordLocation, + RecordSuppression, + RecordTransformation, + RecordTransformations, + RedactConfig, + RedactImageRequest, + RedactImageResponse, + ReidentifyContentRequest, + ReidentifyContentResponse, + ReplaceDictionaryConfig, + ReplaceValueConfig, + ReplaceWithInfoTypeConfig, + RiskAnalysisJobConfig, + Schedule, + StatisticalTable, + StorageMetadataLabel, + StoredInfoType, + StoredInfoTypeConfig, + StoredInfoTypeStats, + StoredInfoTypeVersion, + Table, + TableDataProfile, + TableLocation, + TimePartConfig, + TransformationConfig, + TransformationDescription, + TransformationDetails, + TransformationDetailsStorageConfig, + TransformationErrorHandling, + TransformationLocation, + TransformationOverview, + TransformationResultStatus, + TransformationSummary, + TransientCryptoKey, + UnwrappedCryptoKey, + UpdateDeidentifyTemplateRequest, + UpdateInspectTemplateRequest, + UpdateJobTriggerRequest, + UpdateStoredInfoTypeRequest, + Value, + ValueFrequency, + VersionDescription, + ContentOption, + DlpJobType, + EncryptionStatus, + InfoTypeSupportedBy, + MatchingType, + MetadataType, + RelationalOperator, + ResourceVisibility, + StoredInfoTypeState, + TransformationContainerType, + TransformationResultStatusType, + TransformationType, +) +from .storage import ( + BigQueryField, + BigQueryKey, + BigQueryOptions, + BigQueryTable, + CloudStorageFileSet, + CloudStorageOptions, + CloudStoragePath, + CloudStorageRegexFileSet, + CustomInfoType, + DatastoreKey, + DatastoreOptions, + EntityId, + FieldId, + HybridOptions, + InfoType, + Key, + KindExpression, + 
PartitionId, + RecordKey, + SensitivityScore, + StorageConfig, + StoredType, + TableOptions, + FileType, + Likelihood, +) + +__all__ = ( + 'Action', + 'ActivateJobTriggerRequest', + 'AnalyzeDataSourceRiskDetails', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'Color', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateDeidentifyTemplateRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DataProfileAction', + 'DataProfileConfigSnapshot', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + 'DataRiskLevel', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyTemplate', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDlpJobRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeByHotword', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetDeidentifyTemplateRequest', + 'GetDlpJobRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetStoredInfoTypeRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'ImageTransformations', + 'InfoTypeCategory', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeSummary', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 
'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OtherInfoTypeSummary', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'ProfileStatus', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformation', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'ReplaceDictionaryConfig', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableDataProfile', + 'TableLocation', + 'TimePartConfig', + 'TransformationConfig', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationDetailsStorageConfig', + 'TransformationErrorHandling', + 'TransformationLocation', + 'TransformationOverview', + 'TransformationResultStatus', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateDeidentifyTemplateRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 'VersionDescription', + 'ContentOption', + 'DlpJobType', + 'EncryptionStatus', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'RelationalOperator', + 'ResourceVisibility', + 'StoredInfoTypeState', 
+ 'TransformationContainerType', + 'TransformationResultStatusType', + 'TransformationType', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'SensitivityScore', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py new file mode 100644 index 00000000..d82444a2 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py @@ -0,0 +1,8846 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dlp_v2.types import storage +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'TransformationResultStatusType', + 'TransformationContainerType', + 'TransformationType', + 'RelationalOperator', + 'MatchingType', + 'ContentOption', + 'MetadataType', + 'InfoTypeSupportedBy', + 'DlpJobType', + 'StoredInfoTypeState', + 'ResourceVisibility', + 'EncryptionStatus', + 'ExcludeInfoTypes', + 'ExcludeByHotword', + 'ExclusionRule', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectConfig', + 'ByteContentItem', + 'ContentItem', + 'Table', + 'InspectResult', + 'Finding', + 'Location', + 'ContentLocation', + 'MetadataLocation', + 'StorageMetadataLabel', + 'DocumentLocation', + 'RecordLocation', + 'TableLocation', + 'Container', + 'Range', + 'ImageLocation', + 'BoundingBox', + 'RedactImageRequest', + 'Color', + 'RedactImageResponse', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'InspectContentRequest', + 'InspectContentResponse', + 'OutputStorageConfig', + 'InfoTypeStats', + 'InspectDataSourceDetails', + 'HybridInspectStatistics', + 'InfoTypeDescription', + 'InfoTypeCategory', + 'VersionDescription', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'RiskAnalysisJobConfig', + 'QuasiId', + 'StatisticalTable', + 'PrivacyMetric', + 'AnalyzeDataSourceRiskDetails', + 'ValueFrequency', + 'Value', + 'QuoteInfo', + 'DateTime', + 
'DeidentifyConfig', + 'ImageTransformations', + 'TransformationErrorHandling', + 'PrimitiveTransformation', + 'TimePartConfig', + 'CryptoHashConfig', + 'CryptoDeterministicConfig', + 'ReplaceValueConfig', + 'ReplaceDictionaryConfig', + 'ReplaceWithInfoTypeConfig', + 'RedactConfig', + 'CharsToIgnore', + 'CharacterMaskConfig', + 'FixedSizeBucketingConfig', + 'BucketingConfig', + 'CryptoReplaceFfxFpeConfig', + 'CryptoKey', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'KmsWrappedCryptoKey', + 'DateShiftConfig', + 'InfoTypeTransformations', + 'FieldTransformation', + 'RecordTransformations', + 'RecordSuppression', + 'RecordCondition', + 'TransformationOverview', + 'TransformationSummary', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationLocation', + 'RecordTransformation', + 'TransformationResultStatus', + 'TransformationDetailsStorageConfig', + 'Schedule', + 'Manual', + 'InspectTemplate', + 'DeidentifyTemplate', + 'Error', + 'JobTrigger', + 'Action', + 'TransformationConfig', + 'CreateInspectTemplateRequest', + 'UpdateInspectTemplateRequest', + 'GetInspectTemplateRequest', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'DeleteInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'ActivateJobTriggerRequest', + 'UpdateJobTriggerRequest', + 'GetJobTriggerRequest', + 'CreateDlpJobRequest', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'DeleteJobTriggerRequest', + 'InspectJobConfig', + 'DataProfileAction', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DlpJob', + 'GetDlpJobRequest', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'CancelDlpJobRequest', + 'FinishDlpJobRequest', + 'DeleteDlpJobRequest', + 'CreateDeidentifyTemplateRequest', + 'UpdateDeidentifyTemplateRequest', + 'GetDeidentifyTemplateRequest', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'DeleteDeidentifyTemplateRequest', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 
'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'StoredInfoType', + 'CreateStoredInfoTypeRequest', + 'UpdateStoredInfoTypeRequest', + 'GetStoredInfoTypeRequest', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'DeleteStoredInfoTypeRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectDlpJobRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectResponse', + 'DataRiskLevel', + 'DataProfileConfigSnapshot', + 'TableDataProfile', + 'ProfileStatus', + 'InfoTypeSummary', + 'OtherInfoTypeSummary', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + }, +) + + +class TransformationResultStatusType(proto.Enum): + r"""Enum of possible outcomes of transformations. SUCCESS if + transformation and storing of transformation was successful, + otherwise, reason for not transforming. + + Values: + STATE_TYPE_UNSPECIFIED (0): + No description available. + INVALID_TRANSFORM (1): + This will be set when a finding could not be + transformed (i.e. outside user set bucket + range). + BIGQUERY_MAX_ROW_SIZE_EXCEEDED (2): + This will be set when a BigQuery + transformation was successful but could not be + stored back in BigQuery because the transformed + row exceeds BigQuery's max row size. + METADATA_UNRETRIEVABLE (3): + This will be set when there is a finding in + the custom metadata of a file, but at the write + time of the transformed file, this key / value + pair is unretrievable. + SUCCESS (4): + This will be set when the transformation and + storing of it is successful. + """ + STATE_TYPE_UNSPECIFIED = 0 + INVALID_TRANSFORM = 1 + BIGQUERY_MAX_ROW_SIZE_EXCEEDED = 2 + METADATA_UNRETRIEVABLE = 3 + SUCCESS = 4 + + +class TransformationContainerType(proto.Enum): + r"""Describes functionality of a given container in its original + format. + + Values: + TRANSFORM_UNKNOWN_CONTAINER (0): + No description available. + TRANSFORM_BODY (1): + No description available. 
+ TRANSFORM_METADATA (2): + No description available. + TRANSFORM_TABLE (3): + No description available. + """ + TRANSFORM_UNKNOWN_CONTAINER = 0 + TRANSFORM_BODY = 1 + TRANSFORM_METADATA = 2 + TRANSFORM_TABLE = 3 + + +class TransformationType(proto.Enum): + r"""An enum of rules that can be used to transform a value. Can be a + record suppression, or one of the transformation rules specified + under ``PrimitiveTransformation``. + + Values: + TRANSFORMATION_TYPE_UNSPECIFIED (0): + Unused + RECORD_SUPPRESSION (1): + Record suppression + REPLACE_VALUE (2): + Replace value + REPLACE_DICTIONARY (15): + Replace value using a dictionary. + REDACT (3): + Redact + CHARACTER_MASK (4): + Character mask + CRYPTO_REPLACE_FFX_FPE (5): + FFX-FPE + FIXED_SIZE_BUCKETING (6): + Fixed size bucketing + BUCKETING (7): + Bucketing + REPLACE_WITH_INFO_TYPE (8): + Replace with info type + TIME_PART (9): + Time part + CRYPTO_HASH (10): + Crypto hash + DATE_SHIFT (12): + Date shift + CRYPTO_DETERMINISTIC_CONFIG (13): + Deterministic crypto + REDACT_IMAGE (14): + Redact image + """ + TRANSFORMATION_TYPE_UNSPECIFIED = 0 + RECORD_SUPPRESSION = 1 + REPLACE_VALUE = 2 + REPLACE_DICTIONARY = 15 + REDACT = 3 + CHARACTER_MASK = 4 + CRYPTO_REPLACE_FFX_FPE = 5 + FIXED_SIZE_BUCKETING = 6 + BUCKETING = 7 + REPLACE_WITH_INFO_TYPE = 8 + TIME_PART = 9 + CRYPTO_HASH = 10 + DATE_SHIFT = 12 + CRYPTO_DETERMINISTIC_CONFIG = 13 + REDACT_IMAGE = 14 + + +class RelationalOperator(proto.Enum): + r"""Operators available for comparing the value of fields. + + Values: + RELATIONAL_OPERATOR_UNSPECIFIED (0): + Unused + EQUAL_TO (1): + Equal. Attempts to match even with + incompatible types. + NOT_EQUAL_TO (2): + Not equal to. Attempts to match even with + incompatible types. + GREATER_THAN (3): + Greater than. + LESS_THAN (4): + Less than. + GREATER_THAN_OR_EQUALS (5): + Greater than or equals. + LESS_THAN_OR_EQUALS (6): + Less than or equals. 
+ EXISTS (7): + Exists + """ + RELATIONAL_OPERATOR_UNSPECIFIED = 0 + EQUAL_TO = 1 + NOT_EQUAL_TO = 2 + GREATER_THAN = 3 + LESS_THAN = 4 + GREATER_THAN_OR_EQUALS = 5 + LESS_THAN_OR_EQUALS = 6 + EXISTS = 7 + + +class MatchingType(proto.Enum): + r"""Type of the match which can be applied to different ways of + matching, like Dictionary, regular expression and intersecting + with findings of another info type. + + Values: + MATCHING_TYPE_UNSPECIFIED (0): + Invalid. + MATCHING_TYPE_FULL_MATCH (1): + Full match. + - Dictionary: join of Dictionary results matched + complete finding quote - Regex: all regex + matches fill a finding quote start to end - + Exclude info type: completely inside affecting + info types findings + MATCHING_TYPE_PARTIAL_MATCH (2): + Partial match. + - Dictionary: at least one of the tokens in the + finding matches - Regex: substring of the + finding matches + - Exclude info type: intersects with affecting + info types findings + MATCHING_TYPE_INVERSE_MATCH (3): + Inverse match. + - Dictionary: no tokens in the finding match the + dictionary - Regex: finding doesn't match the + regex + - Exclude info type: no intersection with + affecting info types findings + """ + MATCHING_TYPE_UNSPECIFIED = 0 + MATCHING_TYPE_FULL_MATCH = 1 + MATCHING_TYPE_PARTIAL_MATCH = 2 + MATCHING_TYPE_INVERSE_MATCH = 3 + + +class ContentOption(proto.Enum): + r"""Deprecated and unused. + + Values: + CONTENT_UNSPECIFIED (0): + Includes entire content of a file or a data + stream. + CONTENT_TEXT (1): + Text content within the data, excluding any + metadata. + CONTENT_IMAGE (2): + Images found in the data. + """ + CONTENT_UNSPECIFIED = 0 + CONTENT_TEXT = 1 + CONTENT_IMAGE = 2 + + +class MetadataType(proto.Enum): + r"""Type of metadata containing the finding. + + Values: + METADATATYPE_UNSPECIFIED (0): + Unused + STORAGE_METADATA (2): + General file metadata provided by Cloud + Storage. 
+ """ + METADATATYPE_UNSPECIFIED = 0 + STORAGE_METADATA = 2 + + +class InfoTypeSupportedBy(proto.Enum): + r"""Parts of the APIs which use certain infoTypes. + + Values: + ENUM_TYPE_UNSPECIFIED (0): + Unused. + INSPECT (1): + Supported by the inspect operations. + RISK_ANALYSIS (2): + Supported by the risk analysis operations. + """ + ENUM_TYPE_UNSPECIFIED = 0 + INSPECT = 1 + RISK_ANALYSIS = 2 + + +class DlpJobType(proto.Enum): + r"""An enum to represent the various types of DLP jobs. + + Values: + DLP_JOB_TYPE_UNSPECIFIED (0): + Defaults to INSPECT_JOB. + INSPECT_JOB (1): + The job inspected Google Cloud for sensitive + data. + RISK_ANALYSIS_JOB (2): + The job executed a Risk Analysis computation. + """ + DLP_JOB_TYPE_UNSPECIFIED = 0 + INSPECT_JOB = 1 + RISK_ANALYSIS_JOB = 2 + + +class StoredInfoTypeState(proto.Enum): + r"""State of a StoredInfoType version. + + Values: + STORED_INFO_TYPE_STATE_UNSPECIFIED (0): + Unused + PENDING (1): + StoredInfoType version is being created. + READY (2): + StoredInfoType version is ready for use. + FAILED (3): + StoredInfoType creation failed. All relevant error messages + are returned in the ``StoredInfoTypeVersion`` message. + INVALID (4): + StoredInfoType is no longer valid because artifacts stored + in user-controlled storage were modified. To fix an invalid + StoredInfoType, use the ``UpdateStoredInfoType`` method to + create a new version. + """ + STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 + PENDING = 1 + READY = 2 + FAILED = 3 + INVALID = 4 + + +class ResourceVisibility(proto.Enum): + r"""How broadly a resource has been shared. New items may be + added over time. A higher number means more restricted. + + Values: + RESOURCE_VISIBILITY_UNSPECIFIED (0): + Unused. + RESOURCE_VISIBILITY_PUBLIC (10): + Visible to any user. + RESOURCE_VISIBILITY_RESTRICTED (20): + Visible only to specific users. 
+ """ + RESOURCE_VISIBILITY_UNSPECIFIED = 0 + RESOURCE_VISIBILITY_PUBLIC = 10 + RESOURCE_VISIBILITY_RESTRICTED = 20 + + +class EncryptionStatus(proto.Enum): + r"""How a resource is encrypted. + + Values: + ENCRYPTION_STATUS_UNSPECIFIED (0): + Unused. + ENCRYPTION_GOOGLE_MANAGED (1): + Google manages server-side encryption keys on + your behalf. + ENCRYPTION_CUSTOMER_MANAGED (2): + Customer provides the key. + """ + ENCRYPTION_STATUS_UNSPECIFIED = 0 + ENCRYPTION_GOOGLE_MANAGED = 1 + ENCRYPTION_CUSTOMER_MANAGED = 2 + + +class ExcludeInfoTypes(proto.Message): + r"""List of excluded infoTypes. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoType list in ExclusionRule rule drops a finding when it + overlaps or contained within with a finding of an infoType + from this list. For example, for + ``InspectionRuleSet.info_types`` containing + "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` + with "EMAIL_ADDRESS" the phone number findings are dropped + if they overlap with EMAIL_ADDRESS finding. That leads to + "555-222-2222@example.org" to generate only a single + finding, namely email address. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + + +class ExcludeByHotword(proto.Message): + r"""The rule to exclude findings based on a hotword. For record + inspection of tables, column names are considered hotwords. An + example of this is to exclude a finding if a BigQuery column + matches a specific pattern. + + Attributes: + hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): + Range of characters within which the entire + hotword must reside. The total length of the + window cannot exceed 1000 characters. 
The + windowBefore property in proximity should be set + to 1 if the hotword needs to be included in a + column header. + """ + + hotword_regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CustomInfoType.Regex, + ) + proximity: storage.CustomInfoType.DetectionRule.Proximity = proto.Field( + proto.MESSAGE, + number=2, + message=storage.CustomInfoType.DetectionRule.Proximity, + ) + + +class ExclusionRule(proto.Message): + r"""The rule that specifies conditions when findings of infoTypes + specified in ``InspectionRuleSet`` are removed from results. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Dictionary which defines the rule. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression which defines the rule. + + This field is a member of `oneof`_ ``type``. + exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes): + Set of infoTypes for which findings would + affect this rule. + + This field is a member of `oneof`_ ``type``. + exclude_by_hotword (google.cloud.dlp_v2.types.ExcludeByHotword): + Drop if the hotword rule is contained in the + proximate context. For tabular data, the context + includes the column name. + + This field is a member of `oneof`_ ``type``. + matching_type (google.cloud.dlp_v2.types.MatchingType): + How the rule is applied, see MatchingType + documentation for details. 
+ """ + + dictionary: storage.CustomInfoType.Dictionary = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + exclude_info_types: 'ExcludeInfoTypes' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='ExcludeInfoTypes', + ) + exclude_by_hotword: 'ExcludeByHotword' = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message='ExcludeByHotword', + ) + matching_type: 'MatchingType' = proto.Field( + proto.ENUM, + number=4, + enum='MatchingType', + ) + + +class InspectionRule(proto.Message): + r"""A single inspection rule to be applied to infoTypes, specified in + ``InspectionRuleSet``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + + This field is a member of `oneof`_ ``type``. + exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): + Exclusion rule. + + This field is a member of `oneof`_ ``type``. + """ + + hotword_rule: storage.CustomInfoType.DetectionRule.HotwordRule = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.DetectionRule.HotwordRule, + ) + exclusion_rule: 'ExclusionRule' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='ExclusionRule', + ) + + +class InspectionRuleSet(proto.Message): + r"""Rule set for modifying a set of infoTypes to alter behavior + under certain circumstances, depending on the specific details + of the rules within the set. 
+ + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + List of infoTypes this rule set is applied + to. + rules (MutableSequence[google.cloud.dlp_v2.types.InspectionRule]): + Set of rules to be applied to infoTypes. The + rules are applied in order. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + rules: MutableSequence['InspectionRule'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='InspectionRule', + ) + + +class InspectConfig(proto.Message): + r"""Configuration description of the scanning process. When used with + redactContent only info_types and min_likelihood are currently used. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + Restricts what info_types to look for. The values must + correspond to InfoType values returned by ListInfoTypes or + listed at + https://cloud.google.com/dlp/docs/infotypes-reference. + + When no InfoTypes or CustomInfoTypes are specified in a + request, the system may automatically choose what detectors + to run. By default this may be all types, but may change + over time as detectors are updated. + + If you need precise control and predictability as to what + detectors are run you should specify specific InfoTypes + listed in the reference, otherwise a default list will be + used, which may change over time. + min_likelihood (google.cloud.dlp_v2.types.Likelihood): + Only returns findings equal or above this + threshold. The default is POSSIBLE. + See https://cloud.google.com/dlp/docs/likelihood + to learn more. + limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): + Configuration to control the number of findings returned. + This is not used for data profiling. + + When redacting sensitive data from images, finding limits + don't apply. They can cause unexpected or inconsistent + results, where only some data is redacted. 
Don't include + finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + include_quote (bool): + When true, a contextual quote from the data that triggered a + finding is included in the response; see + [Finding.quote][google.privacy.dlp.v2.Finding.quote]. This + is not used for data profiling. + exclude_info_types (bool): + When true, excludes type information of the + findings. This is not used for data profiling. + custom_info_types (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType]): + CustomInfoTypes provided by the user. See + https://cloud.google.com/dlp/docs/creating-custom-infotypes + to learn more. + content_options (MutableSequence[google.cloud.dlp_v2.types.ContentOption]): + Deprecated and unused. + rule_set (MutableSequence[google.cloud.dlp_v2.types.InspectionRuleSet]): + Set of rules to apply to the findings for + this InspectConfig. Exclusion rules, contained + in the set are executed in the end, other rules + are executed in the order they are specified for + each info type. + """ + + class FindingLimits(proto.Message): + r"""Configuration to control the number of findings returned for + inspection. This is not used for de-identification or data + profiling. + + When redacting sensitive data from images, finding limits don't + apply. They can cause unexpected or inconsistent results, where only + some data is redacted. Don't include finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + + Attributes: + max_findings_per_item (int): + Max number of findings that will be returned for each item + scanned. When set within ``InspectJobConfig``, the maximum + returned is 2000 regardless if this is set higher. When set + within ``InspectContentRequest``, this field is ignored. + max_findings_per_request (int): + Max number of findings that will be returned per + request/job. 
When set within ``InspectContentRequest``, the + maximum returned is 2000 regardless if this is set higher. + max_findings_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): + Configuration of findings limit given for + specified infoTypes. + """ + + class InfoTypeLimit(proto.Message): + r"""Max findings configuration per infoType, per content item or + long running DlpJob. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Type of information the findings limit applies to. Only one + limit per info_type should be provided. If InfoTypeLimit + does not have an info_type, the DLP API applies the limit + against all info_types that are found but not specified in + another InfoTypeLimit. + max_findings (int): + Max findings limit for the given infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + max_findings: int = proto.Field( + proto.INT32, + number=2, + ) + + max_findings_per_item: int = proto.Field( + proto.INT32, + number=1, + ) + max_findings_per_request: int = proto.Field( + proto.INT32, + number=2, + ) + max_findings_per_info_type: MutableSequence['InspectConfig.FindingLimits.InfoTypeLimit'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InspectConfig.FindingLimits.InfoTypeLimit', + ) + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + min_likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=2, + enum=storage.Likelihood, + ) + limits: FindingLimits = proto.Field( + proto.MESSAGE, + number=3, + message=FindingLimits, + ) + include_quote: bool = proto.Field( + proto.BOOL, + number=4, + ) + exclude_info_types: bool = proto.Field( + proto.BOOL, + number=5, + ) + custom_info_types: MutableSequence[storage.CustomInfoType] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=storage.CustomInfoType, + ) + 
content_options: MutableSequence['ContentOption'] = proto.RepeatedField( + proto.ENUM, + number=8, + enum='ContentOption', + ) + rule_set: MutableSequence['InspectionRuleSet'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='InspectionRuleSet', + ) + + +class ByteContentItem(proto.Message): + r"""Container for bytes to inspect or redact. + + Attributes: + type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): + The type of data stored in the bytes string. Default will be + TEXT_UTF8. + data (bytes): + Content data to inspect or redact. + """ + class BytesType(proto.Enum): + r"""The type of data being sent for inspection. To learn more, see + `Supported file + types `__. + + Values: + BYTES_TYPE_UNSPECIFIED (0): + Unused + IMAGE (6): + Any image type. + IMAGE_JPEG (1): + jpeg + IMAGE_BMP (2): + bmp + IMAGE_PNG (3): + png + IMAGE_SVG (4): + svg + TEXT_UTF8 (5): + plain text + WORD_DOCUMENT (7): + docx, docm, dotx, dotm + PDF (8): + pdf + POWERPOINT_DOCUMENT (9): + pptx, pptm, potx, potm, pot + EXCEL_DOCUMENT (10): + xlsx, xlsm, xltx, xltm + AVRO (11): + avro + CSV (12): + csv + TSV (13): + tsv + """ + BYTES_TYPE_UNSPECIFIED = 0 + IMAGE = 6 + IMAGE_JPEG = 1 + IMAGE_BMP = 2 + IMAGE_PNG = 3 + IMAGE_SVG = 4 + TEXT_UTF8 = 5 + WORD_DOCUMENT = 7 + PDF = 8 + POWERPOINT_DOCUMENT = 9 + EXCEL_DOCUMENT = 10 + AVRO = 11 + CSV = 12 + TSV = 13 + + type_: BytesType = proto.Field( + proto.ENUM, + number=1, + enum=BytesType, + ) + data: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class ContentItem(proto.Message): + r""" + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + String data to inspect or redact. 
+ + This field is a member of `oneof`_ ``data_item``. + table (google.cloud.dlp_v2.types.Table): + Structured content for inspection. See + https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table + to learn more. + + This field is a member of `oneof`_ ``data_item``. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + Content data to inspect or redact. Replaces ``type`` and + ``data``. + + This field is a member of `oneof`_ ``data_item``. + """ + + value: str = proto.Field( + proto.STRING, + number=3, + oneof='data_item', + ) + table: 'Table' = proto.Field( + proto.MESSAGE, + number=4, + oneof='data_item', + message='Table', + ) + byte_item: 'ByteContentItem' = proto.Field( + proto.MESSAGE, + number=5, + oneof='data_item', + message='ByteContentItem', + ) + + +class Table(proto.Message): + r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request + allowed. See + https://cloud.google.com/dlp/docs/inspecting-structured-text#inspecting_a_table + to learn more. + + Attributes: + headers (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Headers of the table. + rows (MutableSequence[google.cloud.dlp_v2.types.Table.Row]): + Rows of the table. + """ + + class Row(proto.Message): + r"""Values of the row. + + Attributes: + values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Individual cells. + """ + + values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + + headers: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + rows: MutableSequence[Row] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Row, + ) + + +class InspectResult(proto.Message): + r"""All the findings for a single scanned item. + + Attributes: + findings (MutableSequence[google.cloud.dlp_v2.types.Finding]): + List of findings for an item. 
+ findings_truncated (bool): + If true, then this item might have more + findings than were returned, and the findings + returned are an arbitrary subset of all + findings. The findings list might be truncated + because the input items were too large, or + because the server reached the maximum amount of + resources allowed for a single API call. For + best results, divide the input into smaller + batches. + """ + + findings: MutableSequence['Finding'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Finding', + ) + findings_truncated: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class Finding(proto.Message): + r"""Represents a piece of potentially sensitive content. + + Attributes: + name (str): + Resource name in format + projects/{project}/locations/{location}/findings/{finding} + Populated only when viewing persisted findings. + quote (str): + The content that was found. Even if the content is not + textual, it may be converted to a textual representation + here. Provided if ``include_quote`` is true and the finding + is less than or equal to 4096 bytes long. If the finding + exceeds 4096 bytes in length, the quote may be omitted. + info_type (google.cloud.dlp_v2.types.InfoType): + The type of content that might have been found. Provided if + ``excluded_types`` is false. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Confidence of how likely it is that the ``info_type`` is + correct. + location (google.cloud.dlp_v2.types.Location): + Where the content was found. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when finding was detected. + quote_info (google.cloud.dlp_v2.types.QuoteInfo): + Contains data parsed from quotes. Only populated if + include_quote was set to true and a supported infoType was + requested. Currently supported infoTypes: DATE, + DATE_OF_BIRTH and TIME. + resource_name (str): + The job that stored the finding. + trigger_name (str): + Job trigger name, if applicable, for this + finding. 
+ labels (MutableMapping[str, str]): + The labels associated with this ``Finding``. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + job_create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the job started that produced this + finding. + job_name (str): + The job that stored the finding. + finding_id (str): + The unique finding id. + """ + + name: str = proto.Field( + proto.STRING, + number=14, + ) + quote: str = proto.Field( + proto.STRING, + number=1, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=3, + enum=storage.Likelihood, + ) + location: 'Location' = proto.Field( + proto.MESSAGE, + number=4, + message='Location', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + quote_info: 'QuoteInfo' = proto.Field( + proto.MESSAGE, + number=7, + message='QuoteInfo', + ) + resource_name: str = proto.Field( + proto.STRING, + number=8, + ) + trigger_name: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + job_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + job_name: str = proto.Field( + proto.STRING, + number=13, + ) + finding_id: str = proto.Field( + proto.STRING, + number=15, + ) + + +class Location(proto.Message): + r"""Specifies the location of the finding. 
+ + Attributes: + byte_range (google.cloud.dlp_v2.types.Range): + Zero-based byte offsets delimiting the + finding. These are relative to the finding's + containing element. Note that when the content + is not textual, this references the UTF-8 + encoded textual representation of the content. + Omitted if content is an image. + codepoint_range (google.cloud.dlp_v2.types.Range): + Unicode character offsets delimiting the + finding. These are relative to the finding's + containing element. Provided when the content is + text. + content_locations (MutableSequence[google.cloud.dlp_v2.types.ContentLocation]): + List of nested objects pointing to the + precise location of the finding within the file + or record. + container (google.cloud.dlp_v2.types.Container): + Information about the container where this + finding occurred, if available. + """ + + byte_range: 'Range' = proto.Field( + proto.MESSAGE, + number=1, + message='Range', + ) + codepoint_range: 'Range' = proto.Field( + proto.MESSAGE, + number=2, + message='Range', + ) + content_locations: MutableSequence['ContentLocation'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ContentLocation', + ) + container: 'Container' = proto.Field( + proto.MESSAGE, + number=8, + message='Container', + ) + + +class ContentLocation(proto.Message): + r"""Precise location of the finding within a document, record, + image, or metadata container. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + container_name (str): + Name of the container where the finding is located. The top + level name is the source file name or table name. 
Names of + some common storage containers are formatted as follows: + + - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` + - Cloud Storage files: ``gs://{bucket}/{path}`` + - Datastore namespace: {namespace} + + Nested names could be absent if the embedded object has no + string identifier (for example, an image contained within a + document). + record_location (google.cloud.dlp_v2.types.RecordLocation): + Location within a row or record of a database + table. + + This field is a member of `oneof`_ ``location``. + image_location (google.cloud.dlp_v2.types.ImageLocation): + Location within an image's pixels. + + This field is a member of `oneof`_ ``location``. + document_location (google.cloud.dlp_v2.types.DocumentLocation): + Location data for document files. + + This field is a member of `oneof`_ ``location``. + metadata_location (google.cloud.dlp_v2.types.MetadataLocation): + Location within the metadata for inspected + content. + + This field is a member of `oneof`_ ``location``. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Finding container modification timestamp, if applicable. For + Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + container_version (str): + Finding container version, if available + ("generation" for Cloud Storage). 
+ """ + + container_name: str = proto.Field( + proto.STRING, + number=1, + ) + record_location: 'RecordLocation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location', + message='RecordLocation', + ) + image_location: 'ImageLocation' = proto.Field( + proto.MESSAGE, + number=3, + oneof='location', + message='ImageLocation', + ) + document_location: 'DocumentLocation' = proto.Field( + proto.MESSAGE, + number=5, + oneof='location', + message='DocumentLocation', + ) + metadata_location: 'MetadataLocation' = proto.Field( + proto.MESSAGE, + number=8, + oneof='location', + message='MetadataLocation', + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class MetadataLocation(proto.Message): + r"""Metadata Location + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.dlp_v2.types.MetadataType): + Type of metadata containing the finding. + storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): + Storage metadata. + + This field is a member of `oneof`_ ``label``. + """ + + type_: 'MetadataType' = proto.Field( + proto.ENUM, + number=1, + enum='MetadataType', + ) + storage_label: 'StorageMetadataLabel' = proto.Field( + proto.MESSAGE, + number=3, + oneof='label', + message='StorageMetadataLabel', + ) + + +class StorageMetadataLabel(proto.Message): + r"""Storage metadata label to indicate which metadata entry + contains findings. + + Attributes: + key (str): + + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DocumentLocation(proto.Message): + r"""Location of a finding within a document. + + Attributes: + file_offset (int): + Offset of the line, from the beginning of the + file, where the finding is located. 
+ """ + + file_offset: int = proto.Field( + proto.INT64, + number=1, + ) + + +class RecordLocation(proto.Message): + r"""Location of a finding within a row or record. + + Attributes: + record_key (google.cloud.dlp_v2.types.RecordKey): + Key of the finding. + field_id (google.cloud.dlp_v2.types.FieldId): + Field id of the field containing the finding. + table_location (google.cloud.dlp_v2.types.TableLocation): + Location within a ``ContentItem.Table``. + """ + + record_key: storage.RecordKey = proto.Field( + proto.MESSAGE, + number=1, + message=storage.RecordKey, + ) + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + table_location: 'TableLocation' = proto.Field( + proto.MESSAGE, + number=3, + message='TableLocation', + ) + + +class TableLocation(proto.Message): + r"""Location of a finding within a table. + + Attributes: + row_index (int): + The zero-based index of the row where the finding is + located. Only populated for resources that have a natural + ordering, not BigQuery. In BigQuery, to identify the row a + finding came from, populate + BigQueryOptions.identifying_fields with your primary key + column names and when you store the findings the value of + those columns will be stored inside of Finding. + """ + + row_index: int = proto.Field( + proto.INT64, + number=1, + ) + + +class Container(proto.Message): + r"""Represents a container that may contain DLP findings. + Examples of a container include a file, table, or database + record. + + Attributes: + type_ (str): + Container type, for example BigQuery or Cloud + Storage. + project_id (str): + Project where the finding was found. + Can be different from the project that owns the + finding. + full_path (str): + A string representation of the full container + name. Examples: + - BigQuery: 'Project:DataSetId.TableId' + - Cloud Storage: + 'gs://Bucket/folders/filename.txt' + root_path (str): + The root of the container. 
Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the root is ``dataset_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the root is + ``gs://bucket`` + relative_path (str): + The rest of the path after the root. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the relative path is ``table_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the relative path is + ``folder/filename.txt`` + update_time (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if applicable. + For Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + version (str): + Findings container version, if available + ("generation" for Cloud Storage). + """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + full_path: str = proto.Field( + proto.STRING, + number=3, + ) + root_path: str = proto.Field( + proto.STRING, + number=4, + ) + relative_path: str = proto.Field( + proto.STRING, + number=5, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class Range(proto.Message): + r"""Generic half-open interval [start, end) + + Attributes: + start (int): + Index of the first character of the range + (inclusive). + end (int): + Index of the last character of the range + (exclusive). + """ + + start: int = proto.Field( + proto.INT64, + number=1, + ) + end: int = proto.Field( + proto.INT64, + number=2, + ) + + +class ImageLocation(proto.Message): + r"""Location of the finding within an image. 
+ + Attributes: + bounding_boxes (MutableSequence[google.cloud.dlp_v2.types.BoundingBox]): + Bounding boxes locating the pixels within the + image containing the finding. + """ + + bounding_boxes: MutableSequence['BoundingBox'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BoundingBox', + ) + + +class BoundingBox(proto.Message): + r"""Bounding box encompassing detected text within an image. + + Attributes: + top (int): + Top coordinate of the bounding box. (0,0) is + upper left. + left (int): + Left coordinate of the bounding box. (0,0) is + upper left. + width (int): + Width of the bounding box in pixels. + height (int): + Height of the bounding box in pixels. + """ + + top: int = proto.Field( + proto.INT32, + number=1, + ) + left: int = proto.Field( + proto.INT32, + number=2, + ) + width: int = proto.Field( + proto.INT32, + number=3, + ) + height: int = proto.Field( + proto.INT32, + number=4, + ) + + +class RedactImageRequest(proto.Message): + r"""Request to search for potentially sensitive info in an image + and redact it by covering it with a colored rectangle. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + location_id (str): + Deprecated. This field has no effect. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. 
+ image_redaction_configs (MutableSequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): + The configuration for specifying what content + to redact from images. + include_findings (bool): + Whether the response should include findings + along with the redacted image. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + The content must be PNG, JPEG, SVG or BMP. + """ + + class ImageRedactionConfig(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Only one per info_type should be provided per request. If + not specified, and redact_all_text is false, the DLP API + will redact all text that it matches against all info_types + that are found, but not specified in another + ImageRedactionConfig. + + This field is a member of `oneof`_ ``target``. + redact_all_text (bool): + If true, all text found in the image, regardless whether it + matches an info_type, is redacted. Only one should be + provided. + + This field is a member of `oneof`_ ``target``. + redaction_color (google.cloud.dlp_v2.types.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. 
+ """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + oneof='target', + message=storage.InfoType, + ) + redact_all_text: bool = proto.Field( + proto.BOOL, + number=2, + oneof='target', + ) + redaction_color: 'Color' = proto.Field( + proto.MESSAGE, + number=3, + message='Color', + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + location_id: str = proto.Field( + proto.STRING, + number=8, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + image_redaction_configs: MutableSequence[ImageRedactionConfig] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=ImageRedactionConfig, + ) + include_findings: bool = proto.Field( + proto.BOOL, + number=6, + ) + byte_item: 'ByteContentItem' = proto.Field( + proto.MESSAGE, + number=7, + message='ByteContentItem', + ) + + +class Color(proto.Message): + r"""Represents a color in the RGB color space. + + Attributes: + red (float): + The amount of red in the color as a value in the interval + [0, 1]. + green (float): + The amount of green in the color as a value in the interval + [0, 1]. + blue (float): + The amount of blue in the color as a value in the interval + [0, 1]. + """ + + red: float = proto.Field( + proto.FLOAT, + number=1, + ) + green: float = proto.Field( + proto.FLOAT, + number=2, + ) + blue: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class RedactImageResponse(proto.Message): + r"""Results of redacting an image. + + Attributes: + redacted_image (bytes): + The redacted image. The type will be the same + as the original image. + extracted_text (str): + If an image was being inspected and the InspectConfig's + include_quote was set to true, then this field will include + all text, if any, that was found in the image. + inspect_result (google.cloud.dlp_v2.types.InspectResult): + The findings. Populated when include_findings in the request + is true. 
+ """ + + redacted_image: bytes = proto.Field( + proto.BYTES, + number=1, + ) + extracted_text: str = proto.Field( + proto.STRING, + number=2, + ) + inspect_result: 'InspectResult' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectResult', + ) + + +class DeidentifyContentRequest(proto.Message): + r"""Request to de-identify a ContentItem. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the de-identification of the content item. + Items specified here will override the template referenced + by the deidentify_template_name argument. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. Items specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to de-identify. Will be treated as text. + + This value must be of type + [Table][google.privacy.dlp.v2.Table] if your + [deidentify_config][google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config] + is a + [RecordTransformations][google.privacy.dlp.v2.RecordTransformations] + object. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. 
Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + deidentify_template_name (str): + Template to use. Any configuration directly specified in + deidentify_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyConfig', + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=4, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + deidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DeidentifyContentResponse(proto.Message): + r"""Results of de-identifying a ContentItem. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The de-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made on the ``item``. + """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + overview: 'TransformationOverview' = proto.Field( + proto.MESSAGE, + number=2, + message='TransformationOverview', + ) + + +class ReidentifyContentRequest(proto.Message): + r"""Request to re-identify an item. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the re-identification of the content item. + This field shares the same proto message type that is used + for de-identification, however its usage here is for the + reversal of the previous de-identification. + Re-identification is performed by examining the + transformations used to de-identify the items and executing + the reverse. This requires that only reversible + transformations be provided here. The reversible + transformations are: + + - ``CryptoDeterministicConfig`` + - ``CryptoReplaceFfxFpeConfig`` + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. + item (google.cloud.dlp_v2.types.ContentItem): + The item to re-identify. Will be treated as + text. + inspect_template_name (str): + Template to use. Any configuration directly specified in + ``inspect_config`` will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + reidentify_template_name (str): + Template to use. References an instance of + ``DeidentifyTemplate``. Any configuration directly specified + in ``reidentify_config`` or ``inspect_config`` will override + those set in the template. 
The ``DeidentifyTemplate`` used + must include only reversible transformations. Singular + fields that are set in this request will replace their + corresponding fields in the template. Repeated fields are + appended. Singular sub-messages and groups are recursively + merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + reidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyConfig', + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=4, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + reidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ReidentifyContentResponse(proto.Message): + r"""Results of re-identifying an item. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The re-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made to the ``item``. + """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + overview: 'TransformationOverview' = proto.Field( + proto.MESSAGE, + number=2, + message='TransformationOverview', + ) + + +class InspectContentRequest(proto.Message): + r"""Request to search for potentially sensitive info in a + ContentItem. + + Attributes: + parent (str): + Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. What specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class InspectContentResponse(proto.Message): + r"""Results of inspecting an item. + + Attributes: + result (google.cloud.dlp_v2.types.InspectResult): + The findings. 
+ """ + + result: 'InspectResult' = proto.Field( + proto.MESSAGE, + number=1, + message='InspectResult', + ) + + +class OutputStorageConfig(proto.Message): + r"""Cloud repository for storing output. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Store findings in an existing table or a new table in an + existing dataset. If table_id is not set a new one will be + generated for you with the following format: + dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific time zone + will be used for generating the date details. + + For Inspect, each column in an existing output table must + have the same name, type, and mode of a field in the + ``Finding`` object. + + For Risk, an existing output table should be the output of a + previous Risk analysis job run on the same source table, + with the same privacy metric and quasi-identifiers. Risk + jobs that analyze the same table but compute a different + privacy metric, or use different sets of quasi-identifiers, + cannot store their results in the same table. + + This field is a member of `oneof`_ ``type``. + output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): + Schema used for writing the findings for Inspect jobs. This + field is only used for Inspect and must be unspecified for + Risk jobs. Columns are derived from the ``Finding`` object. + If appending to an existing table, any columns from the + predefined schema that are missing will be added. No columns + in the existing table will be deleted. + + If unspecified, then all available columns will be used for + a new table or an (existing) table with no schema, and no + changes will be made to an existing table that has a schema. + Only for use with external storage. + """ + class OutputSchema(proto.Enum): + r"""Predefined schemas for storing findings. + Only for use with external storage. 
+ + Values: + OUTPUT_SCHEMA_UNSPECIFIED (0): + Unused. + BASIC_COLUMNS (1): + Basic schema including only ``info_type``, ``quote``, + ``certainty``, and ``timestamp``. + GCS_COLUMNS (2): + Schema tailored to findings from scanning + Cloud Storage. + DATASTORE_COLUMNS (3): + Schema tailored to findings from scanning + Google Datastore. + BIG_QUERY_COLUMNS (4): + Schema tailored to findings from scanning + Google BigQuery. + ALL_COLUMNS (5): + Schema containing all columns. + """ + OUTPUT_SCHEMA_UNSPECIFIED = 0 + BASIC_COLUMNS = 1 + GCS_COLUMNS = 2 + DATASTORE_COLUMNS = 3 + BIG_QUERY_COLUMNS = 4 + ALL_COLUMNS = 5 + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.BigQueryTable, + ) + output_schema: OutputSchema = proto.Field( + proto.ENUM, + number=3, + enum=OutputSchema, + ) + + +class InfoTypeStats(proto.Message): + r"""Statistics regarding a specific InfoType. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The type of finding this stat is for. + count (int): + Number of findings for this infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class InspectDataSourceDetails(proto.Message): + r"""The results of an inspect DataSource job. + + Attributes: + requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): + The configuration used for this job. + result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): + A summary of the outcome of this inspection + job. + """ + + class RequestedOptions(proto.Message): + r"""Snapshot of the inspection configuration. + + Attributes: + snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + If run with an InspectTemplate, a snapshot of + its state at the time of this run. + job_config (google.cloud.dlp_v2.types.InspectJobConfig): + Inspect config. 
+ """ + + snapshot_inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + job_config: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectJobConfig', + ) + + class Result(proto.Message): + r"""All result fields mentioned below are updated while the job + is processing. + + Attributes: + processed_bytes (int): + Total size in bytes that were processed. + total_estimated_bytes (int): + Estimate of the number of bytes to process. + info_type_stats (MutableSequence[google.cloud.dlp_v2.types.InfoTypeStats]): + Statistics of how many instances of each info + type were found during inspect job. + hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): + Statistics related to the processing of + hybrid inspect. + """ + + processed_bytes: int = proto.Field( + proto.INT64, + number=1, + ) + total_estimated_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + info_type_stats: MutableSequence['InfoTypeStats'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InfoTypeStats', + ) + hybrid_stats: 'HybridInspectStatistics' = proto.Field( + proto.MESSAGE, + number=7, + message='HybridInspectStatistics', + ) + + requested_options: RequestedOptions = proto.Field( + proto.MESSAGE, + number=2, + message=RequestedOptions, + ) + result: Result = proto.Field( + proto.MESSAGE, + number=3, + message=Result, + ) + + +class HybridInspectStatistics(proto.Message): + r"""Statistics related to processing hybrid inspect requests. + + Attributes: + processed_count (int): + The number of hybrid inspection requests + processed within this job. + aborted_count (int): + The number of hybrid inspection requests + aborted because the job ran out of quota or was + ended before they could be processed. + pending_count (int): + The number of hybrid requests currently being processed. + Only populated when called via method ``getDlpJob``. 
A burst + of traffic may cause hybrid inspect requests to be enqueued. + Processing will take place as quickly as possible, but + resource limitations may impact how long a request is + enqueued for. + """ + + processed_count: int = proto.Field( + proto.INT64, + number=1, + ) + aborted_count: int = proto.Field( + proto.INT64, + number=2, + ) + pending_count: int = proto.Field( + proto.INT64, + number=3, + ) + + +class InfoTypeDescription(proto.Message): + r"""InfoType description. + + Attributes: + name (str): + Internal name of the infoType. + display_name (str): + Human readable form of the infoType name. + supported_by (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): + Which parts of the API supports this + InfoType. + description (str): + Description of the infotype. Translated when + language is provided in the request. + versions (MutableSequence[google.cloud.dlp_v2.types.VersionDescription]): + A list of available versions for the + infotype. + categories (MutableSequence[google.cloud.dlp_v2.types.InfoTypeCategory]): + The category of the infoType. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + supported_by: MutableSequence['InfoTypeSupportedBy'] = proto.RepeatedField( + proto.ENUM, + number=3, + enum='InfoTypeSupportedBy', + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + versions: MutableSequence['VersionDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='VersionDescription', + ) + categories: MutableSequence['InfoTypeCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='InfoTypeCategory', + ) + + +class InfoTypeCategory(proto.Message): + r"""Classification of infoTypes to organize them according to + geographic location, industry, and data type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + location_category (google.cloud.dlp_v2.types.InfoTypeCategory.LocationCategory): + The region or country that issued the ID or + document represented by the infoType. + + This field is a member of `oneof`_ ``category``. + industry_category (google.cloud.dlp_v2.types.InfoTypeCategory.IndustryCategory): + The group of relevant businesses where this + infoType is commonly used + + This field is a member of `oneof`_ ``category``. + type_category (google.cloud.dlp_v2.types.InfoTypeCategory.TypeCategory): + The class of identifiers where this infoType + belongs + + This field is a member of `oneof`_ ``category``. + """ + class LocationCategory(proto.Enum): + r"""Enum of the current locations. + We might add more locations in the future. + + Values: + LOCATION_UNSPECIFIED (0): + Unused location + GLOBAL (1): + The infoType is not issued by or tied to a + specific region, but is used almost everywhere. + ARGENTINA (2): + The infoType is typically used in Argentina. + AUSTRALIA (3): + The infoType is typically used in Australia. + BELGIUM (4): + The infoType is typically used in Belgium. + BRAZIL (5): + The infoType is typically used in Brazil. + CANADA (6): + The infoType is typically used in Canada. + CHILE (7): + The infoType is typically used in Chile. + CHINA (8): + The infoType is typically used in China. + COLOMBIA (9): + The infoType is typically used in Colombia. + DENMARK (10): + The infoType is typically used in Denmark. + FRANCE (11): + The infoType is typically used in France. + FINLAND (12): + The infoType is typically used in Finland. + GERMANY (13): + The infoType is typically used in Germany. + HONG_KONG (14): + The infoType is typically used in Hong Kong. + INDIA (15): + The infoType is typically used in India. 
+ INDONESIA (16): + The infoType is typically used in Indonesia. + IRELAND (17): + The infoType is typically used in Ireland. + ISRAEL (18): + The infoType is typically used in Israel. + ITALY (19): + The infoType is typically used in Italy. + JAPAN (20): + The infoType is typically used in Japan. + KOREA (21): + The infoType is typically used in Korea. + MEXICO (22): + The infoType is typically used in Mexico. + THE_NETHERLANDS (23): + The infoType is typically used in the + Netherlands. + NORWAY (24): + The infoType is typically used in Norway. + PARAGUAY (25): + The infoType is typically used in Paraguay. + PERU (26): + The infoType is typically used in Peru. + POLAND (27): + The infoType is typically used in Poland. + PORTUGAL (28): + The infoType is typically used in Portugal. + SINGAPORE (29): + The infoType is typically used in Singapore. + SOUTH_AFRICA (30): + The infoType is typically used in South + Africa. + SPAIN (31): + The infoType is typically used in Spain. + SWEDEN (32): + The infoType is typically used in Sweden. + TAIWAN (33): + The infoType is typically used in Taiwan. + THAILAND (34): + The infoType is typically used in Thailand. + TURKEY (35): + The infoType is typically used in Turkey. + UNITED_KINGDOM (36): + The infoType is typically used in the United + Kingdom. + UNITED_STATES (37): + The infoType is typically used in the United + States. + URUGUAY (38): + The infoType is typically used in Uruguay. + VENEZUELA (39): + The infoType is typically used in Venezuela. + INTERNAL (40): + The infoType is typically used in Google + internally. + NEW_ZEALAND (41): + The infoType is typically used in New + Zealand. 
+ """ + LOCATION_UNSPECIFIED = 0 + GLOBAL = 1 + ARGENTINA = 2 + AUSTRALIA = 3 + BELGIUM = 4 + BRAZIL = 5 + CANADA = 6 + CHILE = 7 + CHINA = 8 + COLOMBIA = 9 + DENMARK = 10 + FRANCE = 11 + FINLAND = 12 + GERMANY = 13 + HONG_KONG = 14 + INDIA = 15 + INDONESIA = 16 + IRELAND = 17 + ISRAEL = 18 + ITALY = 19 + JAPAN = 20 + KOREA = 21 + MEXICO = 22 + THE_NETHERLANDS = 23 + NORWAY = 24 + PARAGUAY = 25 + PERU = 26 + POLAND = 27 + PORTUGAL = 28 + SINGAPORE = 29 + SOUTH_AFRICA = 30 + SPAIN = 31 + SWEDEN = 32 + TAIWAN = 33 + THAILAND = 34 + TURKEY = 35 + UNITED_KINGDOM = 36 + UNITED_STATES = 37 + URUGUAY = 38 + VENEZUELA = 39 + INTERNAL = 40 + NEW_ZEALAND = 41 + + class IndustryCategory(proto.Enum): + r"""Enum of the current industries in the category. + We might add more industries in the future. + + Values: + INDUSTRY_UNSPECIFIED (0): + Unused industry + FINANCE (1): + The infoType is typically used in the finance + industry. + HEALTH (2): + The infoType is typically used in the health + industry. + TELECOMMUNICATIONS (3): + The infoType is typically used in the + telecommunications industry. + """ + INDUSTRY_UNSPECIFIED = 0 + FINANCE = 1 + HEALTH = 2 + TELECOMMUNICATIONS = 3 + + class TypeCategory(proto.Enum): + r"""Enum of the current types in the category. + We might add more types in the future. + + Values: + TYPE_UNSPECIFIED (0): + Unused type + PII (1): + Personally identifiable information, for + example, a name or phone number + SPII (2): + Personally identifiable information that is + especially sensitive, for example, a passport + number. + DEMOGRAPHIC (3): + Attributes that can partially identify + someone, especially in combination with other + attributes, like age, height, and gender. + CREDENTIAL (4): + Confidential or secret information, for + example, a password. + GOVERNMENT_ID (5): + An identification document issued by a + government. + DOCUMENT (6): + A document, for example, a resume or source + code. 
+ CONTEXTUAL_INFORMATION (7): + Information that is not sensitive on its own, + but provides details about the circumstances + surrounding an entity or an event. + """ + TYPE_UNSPECIFIED = 0 + PII = 1 + SPII = 2 + DEMOGRAPHIC = 3 + CREDENTIAL = 4 + GOVERNMENT_ID = 5 + DOCUMENT = 6 + CONTEXTUAL_INFORMATION = 7 + + location_category: LocationCategory = proto.Field( + proto.ENUM, + number=1, + oneof='category', + enum=LocationCategory, + ) + industry_category: IndustryCategory = proto.Field( + proto.ENUM, + number=2, + oneof='category', + enum=IndustryCategory, + ) + type_category: TypeCategory = proto.Field( + proto.ENUM, + number=3, + oneof='category', + enum=TypeCategory, + ) + + +class VersionDescription(proto.Message): + r"""Details about each available version for an infotype. + + Attributes: + version (str): + Name of the version + description (str): + Description of the version. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListInfoTypesRequest(proto.Message): + r"""Request for the list of infoTypes. + + Attributes: + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + language_code (str): + BCP-47 language code for localized infoType + friendly names. If omitted, or if localized + strings are not available, en-US strings will be + returned. + filter (str): + filter to only return infoTypes supported by certain parts + of the API. Defaults to supported_by=INSPECT. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + language_code: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + location_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInfoTypesResponse(proto.Message): + r"""Response to the ListInfoTypes request. 
+ + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeDescription]): + Set of sensitive infoTypes. + """ + + info_types: MutableSequence['InfoTypeDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InfoTypeDescription', + ) + + +class RiskAnalysisJobConfig(proto.Message): + r"""Configuration for a risk analysis job. See + https://cloud.google.com/dlp/docs/concepts-risk-analysis to + learn more. + + Attributes: + privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): + Privacy metric to compute. + source_table (google.cloud.dlp_v2.types.BigQueryTable): + Input dataset to compute metrics over. + actions (MutableSequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. Are executed in the order provided. + """ + + privacy_metric: 'PrivacyMetric' = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + source_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + actions: MutableSequence['Action'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Action', + ) + + +class QuasiId(proto.Message): + r"""A column with a semantic tag attached. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. + info_type (google.cloud.dlp_v2.types.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. 
To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + + This field is a member of `oneof`_ ``tag``. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + + This field is a member of `oneof`_ ``tag``. + inferred (google.protobuf.empty_pb2.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + + This field is a member of `oneof`_ ``tag``. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + oneof='tag', + message=storage.InfoType, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=3, + oneof='tag', + ) + inferred: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=4, + oneof='tag', + message=empty_pb2.Empty, + ) + + +class StatisticalTable(proto.Message): + r"""An auxiliary table containing statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): + Required. Quasi-identifier columns. + relative_frequency (google.cloud.dlp_v2.types.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). 
Null values are assumed to be zero. + """ + + class QuasiIdentifierField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Identifies the column. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=2, + ) + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=3, + message=storage.BigQueryTable, + ) + quasi_ids: MutableSequence[QuasiIdentifierField] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=QuasiIdentifierField, + ) + relative_frequency: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + +class PrivacyMetric(proto.Message): + r"""Privacy metric to compute for reidentification risk analysis. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): + Numerical stats + + This field is a member of `oneof`_ ``type``. + categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig): + Categorical stats + + This field is a member of `oneof`_ ``type``. + k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig): + K-anonymity + + This field is a member of `oneof`_ ``type``. 
+ l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig): + l-diversity + + This field is a member of `oneof`_ ``type``. + k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig): + k-map + + This field is a member of `oneof`_ ``type``. + delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig): + delta-presence + + This field is a member of `oneof`_ ``type``. + """ + + class NumericalStatsConfig(proto.Message): + r"""Compute numerical stats over an individual column, including + min, max, and quantiles. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Field to compute numerical stats on. + Supported types are integer, float, date, + datetime, timestamp, time. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + + class CategoricalStatsConfig(proto.Message): + r"""Compute numerical stats over an individual column, including + number of distinct values and value count distribution. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Field to compute categorical stats on. All + column types are supported except for arrays and + structs. However, it may be more informative to + use NumericalStats when the field type is + supported, depending on the data. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + + class KAnonymityConfig(proto.Message): + r"""k-anonymity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Set of fields to compute k-anonymity over. + When multiple fields are specified, they are + considered a single composite key. Structs and + repeated data types are not supported; however, + nested fields are supported so long as they are + not structs themselves or nested within a + repeated field. 
+ entity_id (google.cloud.dlp_v2.types.EntityId): + Message indicating that multiple rows might be associated to + a single individual. If the same entity_id is associated to + multiple quasi-identifier tuples over distinct rows, we + consider the entire collection of tuples as the composite + quasi-identifier. This collection is a multiset: the order + in which the different tuples appear in the dataset is + ignored, but their frequency is taken into account. + + Important note: a maximum of 1000 rows can be associated to + a single entity ID. If more rows are associated with the + same entity ID, some might be ignored. + """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + entity_id: storage.EntityId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.EntityId, + ) + + class LDiversityConfig(proto.Message): + r"""l-diversity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Set of quasi-identifiers indicating how + equivalence classes are defined for the + l-diversity computation. When multiple fields + are specified, they are considered a single + composite key. + sensitive_attribute (google.cloud.dlp_v2.types.FieldId): + Sensitive field for computing the l-value. + """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + sensitive_attribute: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + class KMapEstimationConfig(proto.Message): + r"""Reidentifiability metric. This corresponds to a risk model + similar to what is called "journalist risk" in the literature, + except the attack dataset is statistically modeled instead of + being perfectly known. 
This can be done using publicly available + data (like the US Census), or using a custom statistical model + (indicated as one or several BigQuery tables), or by + extrapolating from the distribution of values in the input + dataset. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): + Required. Fields considered to be + quasi-identifiers. No two columns can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. + auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers column must + appear in exactly one column of one auxiliary table. + """ + + class TaggedField(proto.Message): + r"""A column with a semantic tag attached. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. + info_type (google.cloud.dlp_v2.types.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + + This field is a member of `oneof`_ ``tag``. + custom_tag (str): + A column can be tagged with a custom tag. 
In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + + This field is a member of `oneof`_ ``tag``. + inferred (google.protobuf.empty_pb2.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + + This field is a member of `oneof`_ ``tag``. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + oneof='tag', + message=storage.InfoType, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=3, + oneof='tag', + ) + inferred: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=4, + oneof='tag', + message=empty_pb2.Empty, + ) + + class AuxiliaryTable(proto.Message): + r"""An auxiliary table contains statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): + Required. Quasi-identifier columns. + relative_frequency (google.cloud.dlp_v2.types.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). Null values are assumed to be zero. + """ + + class QuasiIdField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. 
+ + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Identifies the column. + custom_tag (str): + A auxiliary field. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=2, + ) + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=3, + message=storage.BigQueryTable, + ) + quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField', + ) + relative_frequency: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.TaggedField'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PrivacyMetric.KMapEstimationConfig.TaggedField', + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable', + ) + + class DeltaPresenceEstimationConfig(proto.Message): + r"""δ-presence metric, used to estimate how likely it is for an + attacker to figure out that one given individual appears in a + de-identified dataset. Similarly to the k-map metric, we cannot + compute δ-presence exactly without knowing the attack dataset, + so we use a statistical model instead. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.QuasiId]): + Required. Fields considered to be + quasi-identifiers. No two fields can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. 
+ auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers field must appear + in exactly one field of one auxiliary table. + """ + + quasi_ids: MutableSequence['QuasiId'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='QuasiId', + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence['StatisticalTable'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StatisticalTable', + ) + + numerical_stats_config: NumericalStatsConfig = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=NumericalStatsConfig, + ) + categorical_stats_config: CategoricalStatsConfig = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=CategoricalStatsConfig, + ) + k_anonymity_config: KAnonymityConfig = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message=KAnonymityConfig, + ) + l_diversity_config: LDiversityConfig = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=LDiversityConfig, + ) + k_map_estimation_config: KMapEstimationConfig = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=KMapEstimationConfig, + ) + delta_presence_estimation_config: DeltaPresenceEstimationConfig = proto.Field( + proto.MESSAGE, + number=6, + oneof='type', + message=DeltaPresenceEstimationConfig, + ) + + +class AnalyzeDataSourceRiskDetails(proto.Message): + r"""Result of a risk analysis operation request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): + Privacy metric to compute. + requested_source_table (google.cloud.dlp_v2.types.BigQueryTable): + Input dataset to compute metrics over. + numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult): + Numerical stats result + + This field is a member of `oneof`_ ``result``. + categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult): + Categorical stats result + + This field is a member of `oneof`_ ``result``. + k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult): + K-anonymity result + + This field is a member of `oneof`_ ``result``. + l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult): + L-divesity result + + This field is a member of `oneof`_ ``result``. + k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult): + K-map result + + This field is a member of `oneof`_ ``result``. + delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): + Delta-presence result + + This field is a member of `oneof`_ ``result``. + requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions): + The configuration used for this job. + """ + + class NumericalStatsResult(proto.Message): + r"""Result of the numerical stats computation. + + Attributes: + min_value (google.cloud.dlp_v2.types.Value): + Minimum value appearing in the column. + max_value (google.cloud.dlp_v2.types.Value): + Maximum value appearing in the column. + quantile_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + List of 99 values that partition the set of + field values into 100 equal sized buckets. 
+ """ + + min_value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + max_value: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + quantile_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Value', + ) + + class CategoricalStatsResult(proto.Message): + r"""Result of the categorical stats computation. + + Attributes: + value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): + Histogram of value frequencies in the column. + """ + + class CategoricalStatsHistogramBucket(proto.Message): + r"""Histogram of value frequencies in the column. + + Attributes: + value_frequency_lower_bound (int): + Lower bound on the value frequency of the + values in this bucket. + value_frequency_upper_bound (int): + Upper bound on the value frequency of the + values in this bucket. + bucket_size (int): + Total number of values in this bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): + Sample of value frequencies in this bucket. + The total number of values returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct values in this + bucket. 
+ """ + + value_frequency_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + value_frequency_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ValueFrequency', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket', + ) + + class KAnonymityResult(proto.Message): + r"""Result of the k-anonymity computation. + + Attributes: + equivalence_class_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): + Histogram of k-anonymity equivalence classes. + """ + + class KAnonymityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Set of values defining the equivalence class. + One value per quasi-identifier column in the + original KAnonymity metric message. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the equivalence class, for example + number of rows with the above set of values. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + equivalence_class_size: int = proto.Field( + proto.INT64, + number=2, + ) + + class KAnonymityHistogramBucket(proto.Message): + r"""Histogram of k-anonymity equivalence classes. 
+ + Attributes: + equivalence_class_size_lower_bound (int): + Lower bound on the size of the equivalence + classes in this bucket. + equivalence_class_size_upper_bound (int): + Upper bound on the size of the equivalence + classes in this bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. + """ + + equivalence_class_size_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + equivalence_class_size_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + equivalence_class_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', + ) + + class LDiversityResult(proto.Message): + r"""Result of the l-diversity computation. + + Attributes: + sensitive_value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): + Histogram of l-diversity equivalence class + sensitive value frequencies. + """ + + class LDiversityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value. 
+ + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Quasi-identifier values defining the + k-anonymity equivalence class. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the k-anonymity equivalence class. + num_distinct_sensitive_values (int): + Number of distinct sensitive values in this + equivalence class. + top_sensitive_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): + Estimated frequencies of top sensitive + values. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + equivalence_class_size: int = proto.Field( + proto.INT64, + number=2, + ) + num_distinct_sensitive_values: int = proto.Field( + proto.INT64, + number=3, + ) + top_sensitive_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ValueFrequency', + ) + + class LDiversityHistogramBucket(proto.Message): + r"""Histogram of l-diversity equivalence class sensitive value + frequencies. + + Attributes: + sensitive_value_frequency_lower_bound (int): + Lower bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + sensitive_value_frequency_upper_bound (int): + Upper bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. 
+ """ + + sensitive_value_frequency_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + sensitive_value_frequency_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + sensitive_value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', + ) + + class KMapEstimationResult(proto.Message): + r"""Result of the reidentifiability analysis. Note that these + results are an estimation, not exact values. + + Attributes: + k_map_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): + The intervals [min_anonymity, max_anonymity] do not overlap. + If a value doesn't correspond to any such interval, the + associated frequency is zero. For example, the following + records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} + {min_anonymity: 2, max_anonymity: 3, frequency: 42} + {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean + that there are no record with an estimated anonymity of 4, + 5, or larger than 10. + """ + + class KMapEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + The quasi-identifier values. + estimated_anonymity (int): + The estimated anonymity for these + quasi-identifier values. 
+ """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_anonymity: int = proto.Field( + proto.INT64, + number=2, + ) + + class KMapEstimationHistogramBucket(proto.Message): + r"""A KMapEstimationHistogramBucket message with the following values: + min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are + 42 records whose quasi-identifier values correspond to 3, 4 or 5 + people in the overlying population. An important particular case is + when min_anonymity = max_anonymity = 1: the frequency field then + corresponds to the number of uniquely identifiable records. + + Attributes: + min_anonymity (int): + Always positive. + max_anonymity (int): + Always greater than or equal to min_anonymity. + bucket_size (int): + Number of records within these anonymity + bounds. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_anonymity: int = proto.Field( + proto.INT64, + number=1, + ) + max_anonymity: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=5, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=7, + ) + + k_map_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket', + ) + + class DeltaPresenceEstimationResult(proto.Message): + r"""Result of the δ-presence computation. Note that these results + are an estimation, not exact values. + + Attributes: + delta_presence_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): + The intervals [min_probability, max_probability) do not + overlap. If a value doesn't correspond to any such interval, + the associated frequency is zero. For example, the following + records: {min_probability: 0, max_probability: 0.1, + frequency: 17} {min_probability: 0.2, max_probability: 0.3, + frequency: 42} {min_probability: 0.3, max_probability: 0.4, + frequency: 99} mean that there are no record with an + estimated probability in [0.1, 0.2) nor larger or equal to + 0.4. + """ + + class DeltaPresenceEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + The quasi-identifier values. 
+ estimated_probability (float): + The estimated probability that a given individual sharing + these quasi-identifier values is in the dataset. This value, + typically called δ, is the ratio between the number of + records in the dataset with these quasi-identifier values, + and the total number of individuals (inside *and* outside + the dataset) with these quasi-identifier values. For + example, if there are 15 individuals in the dataset who + share the same quasi-identifier values, and an estimated 100 + people in the entire population with these values, then δ is + 0.15. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_probability: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + class DeltaPresenceEstimationHistogramBucket(proto.Message): + r"""A DeltaPresenceEstimationHistogramBucket message with the following + values: min_probability: 0.1 max_probability: 0.2 frequency: 42 + means that there are 42 records for which δ is in [0.1, 0.2). An + important particular case is when min_probability = max_probability + = 1: then, every individual who shares this quasi-identifier + combination is in the dataset. + + Attributes: + min_probability (float): + Between 0 and 1. + max_probability (float): + Always greater than or equal to min_probability. + bucket_size (int): + Number of records within these probability + bounds. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_probability: float = proto.Field( + proto.DOUBLE, + number=1, + ) + max_probability: float = proto.Field( + proto.DOUBLE, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=5, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=7, + ) + + delta_presence_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', + ) + + class RequestedRiskAnalysisOptions(proto.Message): + r"""Risk analysis options. + + Attributes: + job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + The job config for the risk job. 
+ """ + + job_config: 'RiskAnalysisJobConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='RiskAnalysisJobConfig', + ) + + requested_privacy_metric: 'PrivacyMetric' = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + requested_source_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + numerical_stats_result: NumericalStatsResult = proto.Field( + proto.MESSAGE, + number=3, + oneof='result', + message=NumericalStatsResult, + ) + categorical_stats_result: CategoricalStatsResult = proto.Field( + proto.MESSAGE, + number=4, + oneof='result', + message=CategoricalStatsResult, + ) + k_anonymity_result: KAnonymityResult = proto.Field( + proto.MESSAGE, + number=5, + oneof='result', + message=KAnonymityResult, + ) + l_diversity_result: LDiversityResult = proto.Field( + proto.MESSAGE, + number=6, + oneof='result', + message=LDiversityResult, + ) + k_map_estimation_result: KMapEstimationResult = proto.Field( + proto.MESSAGE, + number=7, + oneof='result', + message=KMapEstimationResult, + ) + delta_presence_estimation_result: DeltaPresenceEstimationResult = proto.Field( + proto.MESSAGE, + number=9, + oneof='result', + message=DeltaPresenceEstimationResult, + ) + requested_options: RequestedRiskAnalysisOptions = proto.Field( + proto.MESSAGE, + number=10, + message=RequestedRiskAnalysisOptions, + ) + + +class ValueFrequency(proto.Message): + r"""A value of a field, including its frequency. + + Attributes: + value (google.cloud.dlp_v2.types.Value): + A value contained in the field in question. + count (int): + How many times the value is contained in the + field. + """ + + value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class Value(proto.Message): + r"""Set of primitive values supported by the system. 
Note that for the + purposes of inspection or transformation, the number of bytes + considered to comprise a 'Value' is based on its representation as a + UTF-8 encoded string. For example, if 'integer_value' is set to + 123456789, the number of bytes would be counted as 9, even though an + int64 only holds up to 8 bytes of data. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + integer_value (int): + integer + + This field is a member of `oneof`_ ``type``. + float_value (float): + float + + This field is a member of `oneof`_ ``type``. + string_value (str): + string + + This field is a member of `oneof`_ ``type``. + boolean_value (bool): + boolean + + This field is a member of `oneof`_ ``type``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + timestamp + + This field is a member of `oneof`_ ``type``. + time_value (google.type.timeofday_pb2.TimeOfDay): + time of day + + This field is a member of `oneof`_ ``type``. + date_value (google.type.date_pb2.Date): + date + + This field is a member of `oneof`_ ``type``. + day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): + day of week + + This field is a member of `oneof`_ ``type``. 
+ """ + + integer_value: int = proto.Field( + proto.INT64, + number=1, + oneof='type', + ) + float_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof='type', + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof='type', + ) + boolean_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof='type', + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=timestamp_pb2.Timestamp, + ) + time_value: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=6, + oneof='type', + message=timeofday_pb2.TimeOfDay, + ) + date_value: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=7, + oneof='type', + message=date_pb2.Date, + ) + day_of_week_value: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=8, + oneof='type', + enum=dayofweek_pb2.DayOfWeek, + ) + + +class QuoteInfo(proto.Message): + r"""Message for infoType-dependent details parsed from quote. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + date_time (google.cloud.dlp_v2.types.DateTime): + The date time indicated by the quote. + + This field is a member of `oneof`_ ``parsed_quote``. + """ + + date_time: 'DateTime' = proto.Field( + proto.MESSAGE, + number=2, + oneof='parsed_quote', + message='DateTime', + ) + + +class DateTime(proto.Message): + r"""Message for a date time object. + e.g. 2018-01-01, 5th August. + + Attributes: + date (google.type.date_pb2.Date): + One or more of the following must be set. + Must be a valid date or time value. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Day of week + time (google.type.timeofday_pb2.TimeOfDay): + Time of day + time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): + Time zone + """ + + class TimeZone(proto.Message): + r"""Time zone of the date time object. + + Attributes: + offset_minutes (int): + Set only if the offset can be determined. 
+ Positive for time ahead of UTC. E.g. For + "UTC-9", this value is -540. + """ + + offset_minutes: int = proto.Field( + proto.INT32, + number=1, + ) + + date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + time_zone: TimeZone = proto.Field( + proto.MESSAGE, + number=4, + message=TimeZone, + ) + + +class DeidentifyConfig(proto.Message): + r"""The configuration that controls how the data will change. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the dataset as free-form text and apply + the same free text transformation everywhere. + + This field is a member of `oneof`_ ``transformation``. + record_transformations (google.cloud.dlp_v2.types.RecordTransformations): + Treat the dataset as structured. + Transformations can be applied to specific + locations within structured datasets, such as + transforming a column within a table. + + This field is a member of `oneof`_ ``transformation``. + image_transformations (google.cloud.dlp_v2.types.ImageTransformations): + Treat the dataset as an image and redact. + + This field is a member of `oneof`_ ``transformation``. + transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): + Mode for handling transformation errors. If left + unspecified, the default mode is + ``TransformationErrorHandling.ThrowError``. 
+ """ + + info_type_transformations: 'InfoTypeTransformations' = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='InfoTypeTransformations', + ) + record_transformations: 'RecordTransformations' = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RecordTransformations', + ) + image_transformations: 'ImageTransformations' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='ImageTransformations', + ) + transformation_error_handling: 'TransformationErrorHandling' = proto.Field( + proto.MESSAGE, + number=3, + message='TransformationErrorHandling', + ) + + +class ImageTransformations(proto.Message): + r"""A type of transformation that is applied over images. + + Attributes: + transforms (MutableSequence[google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation]): + + """ + + class ImageTransformation(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + selected_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.SelectedInfoTypes): + Apply transformation to the selected info_types. + + This field is a member of `oneof`_ ``target``. + all_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllInfoTypes): + Apply transformation to all findings not specified in other + ImageTransformation's selected_info_types. Only one instance + is allowed within the ImageTransformations message. + + This field is a member of `oneof`_ ``target``. 
+ all_text (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllText):
+ Apply transformation to all text that doesn't
+ match an infoType. Only one instance is allowed
+ within the ImageTransformations message.
+
+ This field is a member of `oneof`_ ``target``.
+ redaction_color (google.cloud.dlp_v2.types.Color):
+ The color to use when redacting content from
+ an image. If not specified, the default is
+ black.
+ """
+
+ class SelectedInfoTypes(proto.Message):
+ r"""Apply transformation to the selected info_types.
+
+ Attributes:
+ info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]):
+ Required. InfoTypes to apply the
+ transformation to. Provided InfoType
+ must be unique within the ImageTransformations
+ message.
+ """
+
+ info_types: MutableSequence[storage.InfoType] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=5,
+ message=storage.InfoType,
+ )
+
+ class AllInfoTypes(proto.Message):
+ r"""Apply transformation to all findings.
+ """
+
+ class AllText(proto.Message):
+ r"""Apply to all text.
+ """ + + selected_info_types: 'ImageTransformations.ImageTransformation.SelectedInfoTypes' = proto.Field( + proto.MESSAGE, + number=4, + oneof='target', + message='ImageTransformations.ImageTransformation.SelectedInfoTypes', + ) + all_info_types: 'ImageTransformations.ImageTransformation.AllInfoTypes' = proto.Field( + proto.MESSAGE, + number=5, + oneof='target', + message='ImageTransformations.ImageTransformation.AllInfoTypes', + ) + all_text: 'ImageTransformations.ImageTransformation.AllText' = proto.Field( + proto.MESSAGE, + number=6, + oneof='target', + message='ImageTransformations.ImageTransformation.AllText', + ) + redaction_color: 'Color' = proto.Field( + proto.MESSAGE, + number=3, + message='Color', + ) + + transforms: MutableSequence[ImageTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=ImageTransformation, + ) + + +class TransformationErrorHandling(proto.Message): + r"""How to handle transformation errors during de-identification. A + transformation error occurs when the requested transformation is + incompatible with the data. For example, trying to de-identify an IP + address using a ``DateShift`` transformation would result in a + transformation error, since date info cannot be extracted from an IP + address. Information about any incompatible transformations, and how + they were handled, is returned in the response as part of the + ``TransformationOverviews``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): + Throw an error + + This field is a member of `oneof`_ ``mode``. 
+ leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed):
+ Ignore errors
+
+ This field is a member of `oneof`_ ``mode``.
+ """
+
+ class ThrowError(proto.Message):
+ r"""Throw an error and fail the request when a transformation
+ error occurs.
+
+ """
+
+ class LeaveUntransformed(proto.Message):
+ r"""Skips the data without modifying it if the requested transformation
+ would cause an error. For example, if a ``DateShift`` transformation
+ were applied to an IP address, this mode would leave the IP address
+ unchanged in the response.
+
+ """
+
+ throw_error: ThrowError = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ oneof='mode',
+ message=ThrowError,
+ )
+ leave_untransformed: LeaveUntransformed = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ oneof='mode',
+ message=LeaveUntransformed,
+ )
+
+
+class PrimitiveTransformation(proto.Message):
+ r"""A rule for transforming a value.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig):
+ Replace with a specified value.
+
+ This field is a member of `oneof`_ ``transformation``.
+ redact_config (google.cloud.dlp_v2.types.RedactConfig):
+ Redact
+
+ This field is a member of `oneof`_ ``transformation``.
+ character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig):
+ Mask
+
+ This field is a member of `oneof`_ ``transformation``.
+ crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig):
+ Ffx-Fpe
+
+ This field is a member of `oneof`_ ``transformation``.
+ fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): + Fixed size bucketing + + This field is a member of `oneof`_ ``transformation``. + bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): + Bucketing + + This field is a member of `oneof`_ ``transformation``. + replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): + Replace with infotype + + This field is a member of `oneof`_ ``transformation``. + time_part_config (google.cloud.dlp_v2.types.TimePartConfig): + Time extraction + + This field is a member of `oneof`_ ``transformation``. + crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): + Crypto + + This field is a member of `oneof`_ ``transformation``. + date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): + Date Shift + + This field is a member of `oneof`_ ``transformation``. + crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): + Deterministic Crypto + + This field is a member of `oneof`_ ``transformation``. + replace_dictionary_config (google.cloud.dlp_v2.types.ReplaceDictionaryConfig): + Replace with a value randomly drawn (with + replacement) from a dictionary. + + This field is a member of `oneof`_ ``transformation``. 
+ """ + + replace_config: 'ReplaceValueConfig' = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='ReplaceValueConfig', + ) + redact_config: 'RedactConfig' = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RedactConfig', + ) + character_mask_config: 'CharacterMaskConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='transformation', + message='CharacterMaskConfig', + ) + crypto_replace_ffx_fpe_config: 'CryptoReplaceFfxFpeConfig' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='CryptoReplaceFfxFpeConfig', + ) + fixed_size_bucketing_config: 'FixedSizeBucketingConfig' = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='FixedSizeBucketingConfig', + ) + bucketing_config: 'BucketingConfig' = proto.Field( + proto.MESSAGE, + number=6, + oneof='transformation', + message='BucketingConfig', + ) + replace_with_info_type_config: 'ReplaceWithInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=7, + oneof='transformation', + message='ReplaceWithInfoTypeConfig', + ) + time_part_config: 'TimePartConfig' = proto.Field( + proto.MESSAGE, + number=8, + oneof='transformation', + message='TimePartConfig', + ) + crypto_hash_config: 'CryptoHashConfig' = proto.Field( + proto.MESSAGE, + number=9, + oneof='transformation', + message='CryptoHashConfig', + ) + date_shift_config: 'DateShiftConfig' = proto.Field( + proto.MESSAGE, + number=11, + oneof='transformation', + message='DateShiftConfig', + ) + crypto_deterministic_config: 'CryptoDeterministicConfig' = proto.Field( + proto.MESSAGE, + number=12, + oneof='transformation', + message='CryptoDeterministicConfig', + ) + replace_dictionary_config: 'ReplaceDictionaryConfig' = proto.Field( + proto.MESSAGE, + number=13, + oneof='transformation', + message='ReplaceDictionaryConfig', + ) + + +class TimePartConfig(proto.Message): + r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or + preserve a 
portion of the value. + + Attributes: + part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): + The part of the time to keep. + """ + class TimePart(proto.Enum): + r"""Components that make up time. + + Values: + TIME_PART_UNSPECIFIED (0): + Unused + YEAR (1): + [0-9999] + MONTH (2): + [1-12] + DAY_OF_MONTH (3): + [1-31] + DAY_OF_WEEK (4): + [1-7] + WEEK_OF_YEAR (5): + [1-53] + HOUR_OF_DAY (6): + [0-23] + """ + TIME_PART_UNSPECIFIED = 0 + YEAR = 1 + MONTH = 2 + DAY_OF_MONTH = 3 + DAY_OF_WEEK = 4 + WEEK_OF_YEAR = 5 + HOUR_OF_DAY = 6 + + part_to_extract: TimePart = proto.Field( + proto.ENUM, + number=1, + enum=TimePart, + ) + + +class CryptoHashConfig(proto.Message): + r"""Pseudonymization method that generates surrogates via + cryptographic hashing. Uses SHA-256. + The key size must be either 32 or 64 bytes. + Outputs a base64 encoded representation of the hashed output + (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). + Currently, only string and integer values can be hashed. See + https://cloud.google.com/dlp/docs/pseudonymization to learn + more. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the hash function. + """ + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + + +class CryptoDeterministicConfig(proto.Message): + r"""Pseudonymization method that generates deterministic + encryption for the given input. Outputs a base64 encoded + representation of the encrypted output. Uses AES-SIV based on + the RFC https://tools.ietf.org/html/rfc5297. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the encryption function. For + deterministic encryption using AES-SIV, the + provided key is internally expanded to 64 bytes + prior to use. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom info type to annotate the surrogate with. 
This + annotation will be applied to the surrogate by prefixing it + with the name of the custom info type followed by the number + of characters comprising the surrogate. The following scheme + defines the format: {info type name}({surrogate character + count}):{surrogate} + + For example, if the name of custom info type is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom info type 'Surrogate'. This + facilitates reversal of the surrogate when it occurs in free + text. + + Note: For record transformations where the entire cell in a + table is being transformed, surrogates are not mandatory. + Surrogates are used to denote the location of the token and + are necessary for re-identification in free form text. + + In order for inspection to work properly, the name of this + info type must not occur naturally anywhere in your data; + otherwise, inspection may either + + - reverse a surrogate that does not correspond to an actual + identifier + - be unable to parse the surrogate and result in an error + + Therefore, choose your custom info type name carefully after + considering what your data looks like. One way to select a + name that has a high chance of yielding reliable detection + is to include one or more unicode characters that are highly + improbable to exist in your data. For example, assuming your + data is entered from a regular ASCII keyboard, the symbol + with the hex code point 29DD might be used like so: + ⧝MY_TOKEN_TYPE. + context (google.cloud.dlp_v2.types.FieldId): + A context may be used for higher security and maintaining + referential integrity such that the same identifier in two + different contexts will be given a distinct surrogate. The + context is appended to plaintext value being encrypted. On + decryption the provided context is validated against the + value used during encryption. 
+ If a context was provided
+ during encryption, same context must be provided during
+ decryption as well.
+
+ If the context is not set, plaintext would be used as is for
+ encryption. If the context is set but:
+
+ 1. there is no record present when transforming a given
+ value or
+ 2. the field is not present when transforming a given value,
+
+ plaintext would be used as is for encryption.
+
+ Note that case (1) is expected when an
+ ``InfoTypeTransformation`` is applied to both structured and
+ unstructured ``ContentItem``\ s.
+ """
+
+ crypto_key: 'CryptoKey' = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message='CryptoKey',
+ )
+ surrogate_info_type: storage.InfoType = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message=storage.InfoType,
+ )
+ context: storage.FieldId = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message=storage.FieldId,
+ )
+
+
+class ReplaceValueConfig(proto.Message):
+ r"""Replace each input value with a given ``Value``.
+
+ Attributes:
+ new_value (google.cloud.dlp_v2.types.Value):
+ Value to replace it with.
+ """
+
+ new_value: 'Value' = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message='Value',
+ )
+
+
+class ReplaceDictionaryConfig(proto.Message):
+ r"""Replace each input value with a value randomly selected from
+ the dictionary.
+
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList):
+ A list of words to select from for random replacement. The
+ `limits <https://cloud.google.com/dlp/limits>`__ page
+ contains details about the size limits of dictionaries.
+
+ This field is a member of `oneof`_ ``type``.
+ """
+
+ word_list: storage.CustomInfoType.Dictionary.WordList = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ oneof='type',
+ message=storage.CustomInfoType.Dictionary.WordList,
+ )
+
+
+class ReplaceWithInfoTypeConfig(proto.Message):
+ r"""Replace each matching finding with the name of the info_type. 
+ """ + + +class RedactConfig(proto.Message): + r"""Redact a given value. For example, if used with an + ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My + phone number is 206-555-0123', the output would be 'My phone number + is '. + + """ + + +class CharsToIgnore(proto.Message): + r"""Characters to skip when doing deidentification of a value. + These will be left alone and skipped. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + characters_to_skip (str): + Characters to not transform when masking. + + This field is a member of `oneof`_ ``characters``. + common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): + Common characters to not transform when + masking. Useful to avoid removing punctuation. + + This field is a member of `oneof`_ ``characters``. + """ + class CommonCharsToIgnore(proto.Enum): + r"""Convenience enum for indicating common characters to not + transform. + + Values: + COMMON_CHARS_TO_IGNORE_UNSPECIFIED (0): + Unused. 
+ NUMERIC (1): + 0-9 + ALPHA_UPPER_CASE (2): + A-Z + ALPHA_LOWER_CASE (3): + a-z + PUNCTUATION (4): + US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ + WHITESPACE (5): + Whitespace character, one of [ \\t\n\x0B\f\r] + """ + COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 + NUMERIC = 1 + ALPHA_UPPER_CASE = 2 + ALPHA_LOWER_CASE = 3 + PUNCTUATION = 4 + WHITESPACE = 5 + + characters_to_skip: str = proto.Field( + proto.STRING, + number=1, + oneof='characters', + ) + common_characters_to_ignore: CommonCharsToIgnore = proto.Field( + proto.ENUM, + number=2, + oneof='characters', + enum=CommonCharsToIgnore, + ) + + +class CharacterMaskConfig(proto.Message): + r"""Partially mask a string by replacing a given number of characters + with a fixed character. Masking can start from the beginning or end + of the string. This can be used on data of any type (numbers, longs, + and so on) and when de-identifying structured data we'll attempt to + preserve the original data's type. (This allows you to take a long + like 123 and modify it to a string like \**3. + + Attributes: + masking_character (str): + Character to use to mask the sensitive values—for example, + ``*`` for an alphabetic string such as a name, or ``0`` for + a numeric string such as ZIP code or credit card number. + This string must have a length of 1. If not supplied, this + value defaults to ``*`` for strings, and ``0`` for digits. + number_to_mask (int): + Number of characters to mask. If not set, all matching chars + will be masked. Skipped characters do not count towards this + tally. + + If ``number_to_mask`` is negative, this denotes inverse + masking. Cloud DLP masks all but a number of characters. For + example, suppose you have the following values: + + - ``masking_character`` is ``*`` + - ``number_to_mask`` is ``-4`` + - ``reverse_order`` is ``false`` + - ``CharsToIgnore`` includes ``-`` + - Input string is ``1234-5678-9012-3456`` + + The resulting de-identified string is + ``****-****-****-3456``. 
Cloud DLP masks all but the last + four characters. If ``reverse_order`` is ``true``, all but + the first four characters are masked as + ``1234-****-****-****``. + reverse_order (bool): + Mask characters in reverse order. For example, if + ``masking_character`` is ``0``, ``number_to_mask`` is + ``14``, and ``reverse_order`` is ``false``, then the input + string ``1234-5678-9012-3456`` is masked as + ``00000000000000-3456``. If ``masking_character`` is ``*``, + ``number_to_mask`` is ``3``, and ``reverse_order`` is + ``true``, then the string ``12345`` is masked as ``12***``. + characters_to_ignore (MutableSequence[google.cloud.dlp_v2.types.CharsToIgnore]): + When masking a string, items in this list will be skipped + when replacing characters. For example, if the input string + is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` + and mask 5 characters with ``*``, Cloud DLP returns + ``***-**5-5555``. + """ + + masking_character: str = proto.Field( + proto.STRING, + number=1, + ) + number_to_mask: int = proto.Field( + proto.INT32, + number=2, + ) + reverse_order: bool = proto.Field( + proto.BOOL, + number=3, + ) + characters_to_ignore: MutableSequence['CharsToIgnore'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='CharsToIgnore', + ) + + +class FixedSizeBucketingConfig(proto.Message): + r"""Buckets values based on fixed size ranges. The Bucketing + transformation can provide all of this functionality, but requires + more configuration. This message is provided as a convenience to the + user for simple bucketing strategies. + + The transformed value will be a hyphenated string of + {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and + upper_bound = 20, all values that are within this bucket will be + replaced with "10-20". + + This can be used on data of type: double, long. 
+ + If the bound Value type differs from the type of data being + transformed, we will first attempt converting the type of the data + to be transformed to match the type of the bound before comparing. + + See https://cloud.google.com/dlp/docs/concepts-bucketing to learn + more. + + Attributes: + lower_bound (google.cloud.dlp_v2.types.Value): + Required. Lower bound value of buckets. All values less than + ``lower_bound`` are grouped together into a single bucket; + for example if ``lower_bound`` = 10, then all values less + than 10 are replaced with the value "-10". + upper_bound (google.cloud.dlp_v2.types.Value): + Required. Upper bound value of buckets. All values greater + than upper_bound are grouped together into a single bucket; + for example if ``upper_bound`` = 89, then all values greater + than 89 are replaced with the value "89+". + bucket_size (float): + Required. Size of each bucket (except for minimum and + maximum buckets). So if ``lower_bound`` = 10, + ``upper_bound`` = 89, and ``bucket_size`` = 10, then the + following buckets would be used: -10, 10-20, 20-30, 30-40, + 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 + decimals works. + """ + + lower_bound: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + upper_bound: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + bucket_size: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + +class BucketingConfig(proto.Message): + r"""Generalization function that buckets values based on ranges. The + ranges and replacement values are dynamically provided by the user + for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> + HIGH This can be used on data of type: number, long, string, + timestamp. If the bound ``Value`` type differs from the type of data + being transformed, we will first attempt converting the type of the + data to be transformed to match the type of the bound before + comparing. 
See https://cloud.google.com/dlp/docs/concepts-bucketing + to learn more. + + Attributes: + buckets (MutableSequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): + Set of buckets. Ranges must be + non-overlapping. + """ + + class Bucket(proto.Message): + r"""Bucket is represented as a range, along with replacement + values. + + Attributes: + min_ (google.cloud.dlp_v2.types.Value): + Lower bound of the range, inclusive. Type + should be the same as max if used. + max_ (google.cloud.dlp_v2.types.Value): + Upper bound of the range, exclusive; type + must match min. + replacement_value (google.cloud.dlp_v2.types.Value): + Required. Replacement value for this bucket. + """ + + min_: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + max_: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + replacement_value: 'Value' = proto.Field( + proto.MESSAGE, + number=3, + message='Value', + ) + + buckets: MutableSequence[Bucket] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Bucket, + ) + + +class CryptoReplaceFfxFpeConfig(proto.Message): + r"""Replaces an identifier with a surrogate using Format Preserving + Encryption (FPE) with the FFX mode of operation; however when used + in the ``ReidentifyContent`` API method, it serves the opposite + function by reversing the surrogate back into the original + identifier. The identifier must be encoded as ASCII. For a given + crypto key and context, the same identifier will be replaced with + the same surrogate. Identifiers must be at least two characters + long. In the case that the identifier is the empty string, it will + be skipped. See https://cloud.google.com/dlp/docs/pseudonymization + to learn more. + + Note: We recommend using CryptoDeterministicConfig for all use cases + which do not require preserving the input alphabet space and size, + plus warrant referential integrity. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Required. The key used by the encryption + algorithm. + context (google.cloud.dlp_v2.types.FieldId): + The 'tweak', a context may be used for higher security since + the same identifier in two different contexts won't be given + the same surrogate. If the context is not set, a default + tweak will be used. + + If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + a default tweak will be used. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + unstructured ``ContentItem``\ s. Currently, the referenced + field may be of value type integer or string. + + The tweak is constructed as a sequence of bytes in big + endian byte order such that: + + - a 64 bit integer is encoded followed by a single byte of + value 1 + - a string is encoded in UTF-8 format followed by a single + byte of value 2 + common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): + Common alphabets. + + This field is a member of `oneof`_ ``alphabet``. + custom_alphabet (str): + This is supported by mapping these to the alphanumeric + characters that the FFX mode natively supports. This happens + before/after encryption/decryption. Each character listed + must appear only once. Number of characters must be in the + range [2, 95]. This must be encoded as ASCII. The order of + characters does not matter. 
The full list of allowed + characters is: + 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz + ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ + + This field is a member of `oneof`_ ``alphabet``. + radix (int): + The native way to select the alphabet. Must be in the range + [2, 95]. + + This field is a member of `oneof`_ ``alphabet``. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom infoType to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom infoType followed by the number + of characters comprising the surrogate. The following scheme + defines the format: + info_type_name(surrogate_character_count):surrogate + + For example, if the name of custom infoType is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom infoType + ```SurrogateType`` `__. + This facilitates reversal of the surrogate when it occurs in + free text. + + In order for inspection to work properly, the name of this + infoType must not occur naturally anywhere in your data; + otherwise, inspection may find a surrogate that does not + correspond to an actual identifier. Therefore, choose your + custom infoType name carefully after considering what your + data looks like. One way to select a name that has a high + chance of yielding reliable detection is to include one or + more unicode characters that are highly improbable to exist + in your data. For example, assuming your data is entered + from a regular ASCII keyboard, the symbol with the hex code + point 29DD might be used like so: ⧝MY_TOKEN_TYPE + """ + class FfxCommonNativeAlphabet(proto.Enum): + r"""These are commonly used subsets of the alphabet that the FFX + mode natively supports. In the algorithm, the alphabet is + selected using the "radix". 
Therefore each corresponds to a + particular radix. + + Values: + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (0): + Unused. + NUMERIC (1): + ``[0-9]`` (radix of 10) + HEXADECIMAL (2): + ``[0-9A-F]`` (radix of 16) + UPPER_CASE_ALPHA_NUMERIC (3): + ``[0-9A-Z]`` (radix of 36) + ALPHA_NUMERIC (4): + ``[0-9A-Za-z]`` (radix of 62) + """ + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 + NUMERIC = 1 + HEXADECIMAL = 2 + UPPER_CASE_ALPHA_NUMERIC = 3 + ALPHA_NUMERIC = 4 + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + common_alphabet: FfxCommonNativeAlphabet = proto.Field( + proto.ENUM, + number=4, + oneof='alphabet', + enum=FfxCommonNativeAlphabet, + ) + custom_alphabet: str = proto.Field( + proto.STRING, + number=5, + oneof='alphabet', + ) + radix: int = proto.Field( + proto.INT32, + number=6, + oneof='alphabet', + ) + surrogate_info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=8, + message=storage.InfoType, + ) + + +class CryptoKey(proto.Message): + r"""This is a data encryption key (DEK) (as opposed to + a key encryption key (KEK) stored by Cloud Key Management + Service (Cloud KMS). + When using Cloud KMS to wrap or unwrap a DEK, be sure to set an + appropriate IAM policy on the KEK to ensure an attacker cannot + unwrap the DEK. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transient (google.cloud.dlp_v2.types.TransientCryptoKey): + Transient crypto key + + This field is a member of `oneof`_ ``source``. 
+ unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): + Unwrapped crypto key + + This field is a member of `oneof`_ ``source``. + kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): + Key wrapped using Cloud KMS + + This field is a member of `oneof`_ ``source``. + """ + + transient: 'TransientCryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + oneof='source', + message='TransientCryptoKey', + ) + unwrapped: 'UnwrappedCryptoKey' = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message='UnwrappedCryptoKey', + ) + kms_wrapped: 'KmsWrappedCryptoKey' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='KmsWrappedCryptoKey', + ) + + +class TransientCryptoKey(proto.Message): + r"""Use this to have a random data crypto key generated. + It will be discarded after the request finishes. + + Attributes: + name (str): + Required. Name of the key. This is an arbitrary string used + to differentiate different keys. A unique key is generated + per name: two separate ``TransientCryptoKey`` protos share + the same generated key if their names are the same. When the + data crypto key is generated, this name is not used in any + way (repeating the api call will result in a different key + being generated). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UnwrappedCryptoKey(proto.Message): + r"""Using raw keys is prone to security risks due to accidentally + leaking the key. Choose another type of key if possible. + + Attributes: + key (bytes): + Required. A 128/192/256 bit key. + """ + + key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class KmsWrappedCryptoKey(proto.Message): + r"""Include to use an existing data crypto key wrapped by KMS. The + wrapped key must be a 128-, 192-, or 256-bit key. 
Authorization + requires the following IAM permissions when sending a request to + perform a crypto transformation using a KMS-wrapped crypto key: + dlp.kms.encrypt + + For more information, see [Creating a wrapped key] + (https://cloud.google.com/dlp/docs/create-wrapped-key). + + Note: When you use Cloud KMS for cryptographic operations, `charges + apply `__. + + Attributes: + wrapped_key (bytes): + Required. The wrapped data crypto key. + crypto_key_name (str): + Required. The resource name of the KMS + CryptoKey to use for unwrapping. + """ + + wrapped_key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DateShiftConfig(proto.Message): + r"""Shifts dates by random number of days, with option to be + consistent for the same context. See + https://cloud.google.com/dlp/docs/concepts-date-shifting to + learn more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + upper_bound_days (int): + Required. Range of shift in days. Actual + shift will be selected at random within this + range (inclusive ends). Negative means shift to + earlier in time. Must not be more than 365250 + days (1000 years) each direction. + For example, 3 means shift date to at most 3 + days into the future. + lower_bound_days (int): + Required. For example, -5 means shift date to + at most 5 days back in the past. + context (google.cloud.dlp_v2.types.FieldId): + Points to the field that contains the + context, for example, an entity id. If set, must + also set cryptoKey. If set, shift will be + consistent for the given context. + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Causes the shift to be computed based on this key and the + context. This results in the same shift for the same context + and crypto_key. If set, must also set context. Can only be + applied to table items. + + This field is a member of `oneof`_ ``method``. 
+ """ + + upper_bound_days: int = proto.Field( + proto.INT32, + number=1, + ) + lower_bound_days: int = proto.Field( + proto.INT32, + number=2, + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=4, + oneof='method', + message='CryptoKey', + ) + + +class InfoTypeTransformations(proto.Message): + r"""A type of transformation that will scan unstructured text and apply + various ``PrimitiveTransformation``\ s to each finding, where the + transformation is applied to only values that were identified as a + specific info_type. + + Attributes: + transformations (MutableSequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): + Required. Transformation for each infoType. + Cannot specify more than one for a given + infoType. + """ + + class InfoTypeTransformation(proto.Message): + r"""A transformation to apply to text that is identified as a specific + info_type. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoTypes to apply the transformation to. An empty list will + cause this transformation to apply to all findings that + correspond to infoTypes that were requested in + ``InspectConfig``. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Required. Primitive transformation to apply + to the infoType. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + primitive_transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=2, + message='PrimitiveTransformation', + ) + + transformations: MutableSequence[InfoTypeTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=InfoTypeTransformation, + ) + + +class FieldTransformation(proto.Message): + r"""The transformation to apply to the field. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Required. Input field(s) to apply the transformation to. + When you have columns that reference their position within a + list, omit the index from the FieldId. FieldId name matching + ignores the index. For example, instead of + "contact.nums[0].type", use "contact.nums.type". + condition (google.cloud.dlp_v2.types.RecordCondition): + Only apply the transformation if the condition evaluates to + true for the given ``RecordCondition``. The conditions are + allowed to reference fields that are not used in the actual + transformation. + + Example Use Cases: + + - Apply a different bucket transformation to an age column + if the zip code column for the same record is within a + specific range. + - Redact a field if the date of birth field is greater than + 85. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Apply the transformation to the entire field. + + This field is a member of `oneof`_ ``transformation``. + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the contents of the field as free text, and + selectively transform content that matches an ``InfoType``. + + This field is a member of `oneof`_ ``transformation``. 
+ """ + + fields: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + condition: 'RecordCondition' = proto.Field( + proto.MESSAGE, + number=3, + message='RecordCondition', + ) + primitive_transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='PrimitiveTransformation', + ) + info_type_transformations: 'InfoTypeTransformations' = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='InfoTypeTransformations', + ) + + +class RecordTransformations(proto.Message): + r"""A type of transformation that is applied over structured data + such as a table. + + Attributes: + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + Transform the record by applying various + field transformations. + record_suppressions (MutableSequence[google.cloud.dlp_v2.types.RecordSuppression]): + Configuration defining which records get + suppressed entirely. Records that match any + suppression rule are omitted from the output. + """ + + field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FieldTransformation', + ) + record_suppressions: MutableSequence['RecordSuppression'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='RecordSuppression', + ) + + +class RecordSuppression(proto.Message): + r"""Configuration to suppress records whose suppression + conditions evaluate to true. + + Attributes: + condition (google.cloud.dlp_v2.types.RecordCondition): + A condition that when it evaluates to true + will result in the record being evaluated to be + suppressed from the transformed content. 
+ """ + + condition: 'RecordCondition' = proto.Field( + proto.MESSAGE, + number=1, + message='RecordCondition', + ) + + +class RecordCondition(proto.Message): + r"""A condition for determining whether a transformation should + be applied to a field. + + Attributes: + expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): + An expression. + """ + + class Condition(proto.Message): + r"""The field type of ``value`` and ``field`` do not need to match to be + considered equal, but not all comparisons are possible. EQUAL_TO and + NOT_EQUAL_TO attempt to compare even with incompatible types, but + all other comparisons are invalid with incompatible types. A + ``value`` of type: + + - ``string`` can be compared against all other types + - ``boolean`` can only be compared against other booleans + - ``integer`` can be compared against doubles or a string if the + string value can be parsed as an integer. + - ``double`` can be compared against integers or a string if the + string can be parsed as a double. + - ``Timestamp`` can be compared against strings in RFC 3339 date + string format. + - ``TimeOfDay`` can be compared against timestamps and strings in + the format of 'HH:mm:ss'. + + If we fail to compare do to type mismatch, a warning will be given + and the condition will evaluate to false. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Field within the record this + condition is evaluated against. + operator (google.cloud.dlp_v2.types.RelationalOperator): + Required. Operator used to compare the field + or infoType to the value. + value (google.cloud.dlp_v2.types.Value): + Value to compare against. [Mandatory, except for ``EXISTS`` + tests.] 
+ """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + operator: 'RelationalOperator' = proto.Field( + proto.ENUM, + number=3, + enum='RelationalOperator', + ) + value: 'Value' = proto.Field( + proto.MESSAGE, + number=4, + message='Value', + ) + + class Conditions(proto.Message): + r"""A collection of conditions. + + Attributes: + conditions (MutableSequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): + A collection of conditions. + """ + + conditions: MutableSequence['RecordCondition.Condition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='RecordCondition.Condition', + ) + + class Expressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): + The operator to apply to the result of conditions. Default + and currently only supported value is ``AND``. + conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): + Conditions to apply to the expression. + + This field is a member of `oneof`_ ``type``. + """ + class LogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused + AND (1): + Conditional AND + """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + AND = 1 + + logical_operator: 'RecordCondition.Expressions.LogicalOperator' = proto.Field( + proto.ENUM, + number=1, + enum='RecordCondition.Expressions.LogicalOperator', + ) + conditions: 'RecordCondition.Conditions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='RecordCondition.Conditions', + ) + + expressions: Expressions = proto.Field( + proto.MESSAGE, + number=3, + message=Expressions, + ) + + +class TransformationOverview(proto.Message): + r"""Overview of the modifications that occurred. 
+ + Attributes: + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + transformation_summaries (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary]): + Transformations applied to the dataset. + """ + + transformed_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + transformation_summaries: MutableSequence['TransformationSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='TransformationSummary', + ) + + +class TransformationSummary(proto.Message): + r"""Summary of a single transformation. Only one of 'transformation', + 'field_transformation', or 'record_suppress' will be set. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a + specific InfoType. + field (google.cloud.dlp_v2.types.FieldId): + Set if the transformation was limited to a + specific FieldId. + transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + The specific transformation these stats apply + to. + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + The field transformation that was applied. + If multiple field transformations are requested + for a single field, this list will contain all + of them; otherwise, only one is supplied. + record_suppress (google.cloud.dlp_v2.types.RecordSuppression): + The specific suppression option these stats + apply to. + results (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): + Collection of all transformations that took + place or had an error. + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + """ + class TransformationResultCode(proto.Enum): + r"""Possible outcomes of transformations. + + Values: + TRANSFORMATION_RESULT_CODE_UNSPECIFIED (0): + Unused + SUCCESS (1): + Transformation completed without an error. + ERROR (2): + Transformation had an error. 
+ """ + TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 + SUCCESS = 1 + ERROR = 2 + + class SummaryResult(proto.Message): + r"""A collection that informs the user the number of times a particular + ``TransformationResultCode`` and error details occurred. + + Attributes: + count (int): + Number of transformations counted by this + result. + code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): + Outcome of the transformation. + details (str): + A place for warnings or errors to show up if + a transformation didn't work as expected. + """ + + count: int = proto.Field( + proto.INT64, + number=1, + ) + code: 'TransformationSummary.TransformationResultCode' = proto.Field( + proto.ENUM, + number=2, + enum='TransformationSummary.TransformationResultCode', + ) + details: str = proto.Field( + proto.STRING, + number=3, + ) + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=3, + message='PrimitiveTransformation', + ) + field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='FieldTransformation', + ) + record_suppress: 'RecordSuppression' = proto.Field( + proto.MESSAGE, + number=6, + message='RecordSuppression', + ) + results: MutableSequence[SummaryResult] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=SummaryResult, + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=7, + ) + + +class TransformationDescription(proto.Message): + r"""A flattened description of a ``PrimitiveTransformation`` or + ``RecordSuppression``. + + Attributes: + type_ (google.cloud.dlp_v2.types.TransformationType): + The transformation type. + description (str): + A description of the transformation. 
This is empty for a + RECORD_SUPPRESSION, or is the output of calling toString() + on the ``PrimitiveTransformation`` protocol buffer message + for any other type of transformation. + condition (str): + A human-readable string representation of the + ``RecordCondition`` corresponding to this transformation. + Set if a ``RecordCondition`` was used to determine whether + or not to apply this transformation. + + Examples: \* (age_field > 85) \* (age_field <= 18) \* + (zip_field exists) \* (zip_field == 01234) && (city_field != + "Springville") \* (zip_field == 01234) && (age_field <= 18) + && (city_field exists) + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a specific + ``InfoType``. + """ + + type_: 'TransformationType' = proto.Field( + proto.ENUM, + number=1, + enum='TransformationType', + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + condition: str = proto.Field( + proto.STRING, + number=3, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=4, + message=storage.InfoType, + ) + + +class TransformationDetails(proto.Message): + r"""Details about a single transformation. This object contains a + description of the transformation, information about whether the + transformation was successfully applied, and the precise + location where the transformation occurred. These details are + stored in a user-specified BigQuery table. + + Attributes: + resource_name (str): + The name of the job that completed the + transformation. + container_name (str): + The top level name of the container where the + transformation is located (this will be the + source file name or table name). + transformation (MutableSequence[google.cloud.dlp_v2.types.TransformationDescription]): + Description of transformation. This would only contain more + than one element if there were multiple matching + transformations and which one to apply was ambiguous. 
Not + set for states that contain no transformation, currently + only state that contains no transformation is + TransformationResultStateType.METADATA_UNRETRIEVABLE. + status_details (google.cloud.dlp_v2.types.TransformationResultStatus): + Status of the transformation, if + transformation was not successful, this will + specify what caused it to fail, otherwise it + will show that the transformation was + successful. + transformed_bytes (int): + The number of bytes that were transformed. If + transformation was unsuccessful or did not take + place because there was no content to transform, + this will be zero. + transformation_location (google.cloud.dlp_v2.types.TransformationLocation): + The precise location of the transformed + content in the original container. + """ + + resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + container_name: str = proto.Field( + proto.STRING, + number=2, + ) + transformation: MutableSequence['TransformationDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='TransformationDescription', + ) + status_details: 'TransformationResultStatus' = proto.Field( + proto.MESSAGE, + number=4, + message='TransformationResultStatus', + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + transformation_location: 'TransformationLocation' = proto.Field( + proto.MESSAGE, + number=6, + message='TransformationLocation', + ) + + +class TransformationLocation(proto.Message): + r"""Specifies the location of a transformation. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + finding_id (str): + For infotype transformations, link to the + corresponding findings ID so that location + information does not need to be duplicated. Each + findings ID correlates to an entry in the + findings output table, this table only gets + created when users specify to save findings (add + the save findings action to the request). + + This field is a member of `oneof`_ ``location_type``. + record_transformation (google.cloud.dlp_v2.types.RecordTransformation): + For record transformations, provide a field + and container information. + + This field is a member of `oneof`_ ``location_type``. + container_type (google.cloud.dlp_v2.types.TransformationContainerType): + Information about the functionality of the + container where this finding occurred, if + available. + """ + + finding_id: str = proto.Field( + proto.STRING, + number=1, + oneof='location_type', + ) + record_transformation: 'RecordTransformation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location_type', + message='RecordTransformation', + ) + container_type: 'TransformationContainerType' = proto.Field( + proto.ENUM, + number=3, + enum='TransformationContainerType', + ) + + +class RecordTransformation(proto.Message): + r""" + + Attributes: + field_id (google.cloud.dlp_v2.types.FieldId): + For record transformations, provide a field. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if + applicable. + container_version (str): + Container version, if available ("generation" + for Cloud Storage). 
+ """ + + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TransformationResultStatus(proto.Message): + r""" + + Attributes: + result_status_type (google.cloud.dlp_v2.types.TransformationResultStatusType): + Transformation result status type, this will + be either SUCCESS, or it will be the reason for + why the transformation was not completely + successful. + details (google.rpc.status_pb2.Status): + Detailed error codes and messages + """ + + result_status_type: 'TransformationResultStatusType' = proto.Field( + proto.ENUM, + number=1, + enum='TransformationResultStatusType', + ) + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class TransformationDetailsStorageConfig(proto.Message): + r"""Config for storing transformation details. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + The BigQuery table in which to store the output. This may be + an existing table or in a new table in an existing dataset. + If table_id is not set a new one will be generated for you + with the following format: + dlp_googleapis_transformation_details_yyyy_mm_dd_[dlp_job_id]. + Pacific time zone will be used for generating the date + details. + + This field is a member of `oneof`_ ``type``. + """ + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.BigQueryTable, + ) + + +class Schedule(proto.Message): + r"""Schedule for inspect job triggers. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + recurrence_period_duration (google.protobuf.duration_pb2.Duration): + With this option a job is started on a + regular periodic basis. For example: every day + (86400 seconds). + A scheduled start time will be skipped if the + previous execution has not ended when its + scheduled time occurs. + This value must be set to a time duration + greater than or equal to 1 day and can be no + longer than 60 days. + + This field is a member of `oneof`_ ``option``. + """ + + recurrence_period_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + oneof='option', + message=duration_pb2.Duration, + ) + + +class Manual(proto.Message): + r"""Job trigger option for hybrid jobs. Jobs must be manually + created and finished. + + """ + + +class InspectTemplate(proto.Message): + r"""The inspectTemplate contains a configuration (set of types of + sensitive data to be detected) to be used anywhere you otherwise + would normally specify InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates to learn + more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + The core content of the template. + Configuration of the scanning process. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='InspectConfig', + ) + + +class DeidentifyTemplate(proto.Message): + r"""DeidentifyTemplates contains instructions on how to + de-identify content. See + https://cloud.google.com/dlp/docs/concepts-templates to learn + more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + The core content of the template. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + deidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='DeidentifyConfig', + ) + + +class Error(proto.Message): + r"""Details information about an error encountered during job + execution or the results of an unsuccessful activation of the + JobTrigger. + + Attributes: + details (google.rpc.status_pb2.Status): + Detailed error codes and messages. + timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): + The times the error occurred. + """ + + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class JobTrigger(proto.Message): + r"""Contains a configuration to make dlp api calls on a repeating + basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers to learn + more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Unique resource name for the triggeredJob, assigned by the + service when the triggeredJob is created, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + display_name (str): + Display name (max 100 chars) + description (str): + User provided description (max 256 chars) + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + For inspect jobs, a snapshot of the + configuration. + + This field is a member of `oneof`_ ``job``. 
+ triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): + A list of triggers which will be OR'ed + together. Only one in the list needs to trigger + for a job to be started. The list may contain + only a single Schedule trigger and must have at + least one object. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Output only. A stream of errors encountered + when the trigger was activated. Repeated errors + may result in the JobTrigger automatically being + paused. Will return the last 100 errors. + Whenever the JobTrigger is modified this list + will be cleared. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of a + triggeredJob. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of a + triggeredJob. + last_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp of the last time + this trigger executed. + status (google.cloud.dlp_v2.types.JobTrigger.Status): + Required. A status for this trigger. + """ + class Status(proto.Enum): + r"""Whether the trigger is currently active. If PAUSED or + CANCELLED, no jobs will be created with this configuration. The + service may automatically pause triggers experiencing frequent + errors. To restart a job, set the status to HEALTHY after + correcting user errors. + + Values: + STATUS_UNSPECIFIED (0): + Unused. + HEALTHY (1): + Trigger is healthy. + PAUSED (2): + Trigger is temporarily paused. + CANCELLED (3): + Trigger is cancelled and can not be resumed. + """ + STATUS_UNSPECIFIED = 0 + HEALTHY = 1 + PAUSED = 2 + CANCELLED = 3 + + class Trigger(proto.Message): + r"""What event needs to occur for a new job to be started. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + schedule (google.cloud.dlp_v2.types.Schedule): + Create a job on a repeating basis based on + the elapse of time. + + This field is a member of `oneof`_ ``trigger``. + manual (google.cloud.dlp_v2.types.Manual): + For use with hybrid jobs. Jobs must be + manually created and finished. + + This field is a member of `oneof`_ ``trigger``. + """ + + schedule: 'Schedule' = proto.Field( + proto.MESSAGE, + number=1, + oneof='trigger', + message='Schedule', + ) + manual: 'Manual' = proto.Field( + proto.MESSAGE, + number=2, + oneof='trigger', + message='Manual', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + inspect_job: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=4, + oneof='job', + message='InspectJobConfig', + ) + triggers: MutableSequence[Trigger] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=Trigger, + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='Error', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + last_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + status: Status = proto.Field( + proto.ENUM, + number=10, + enum=Status, + ) + + +class Action(proto.Message): + r"""A task to execute on the completion of a job. + See https://cloud.google.com/dlp/docs/concepts-actions to learn + more. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): + Save resulting findings in a provided + location. + + This field is a member of `oneof`_ ``action``. + pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub): + Publish a notification to a Pub/Sub topic. + + This field is a member of `oneof`_ ``action``. + publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc): + Publish summary to Cloud Security Command + Center (Alpha). + + This field is a member of `oneof`_ ``action``. + publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog): + Publish findings to Cloud Datahub. + + This field is a member of `oneof`_ ``action``. + deidentify (google.cloud.dlp_v2.types.Action.Deidentify): + Create a de-identified copy of the input + data. + + This field is a member of `oneof`_ ``action``. + job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails): + Sends an email when the job completes. The email goes to IAM + project owners and technical `Essential + Contacts `__. + + This field is a member of `oneof`_ ``action``. + publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver): + Enable Stackdriver metric dlp.googleapis.com/finding_count. + + This field is a member of `oneof`_ ``action``. + """ + + class SaveFindings(proto.Message): + r"""If set, the detailed findings will be persisted to the + specified OutputStorageConfig. Only a single instance of this + action can be specified. + Compatible with: Inspect, Risk + + Attributes: + output_config (google.cloud.dlp_v2.types.OutputStorageConfig): + Location to store findings outside of DLP. 
+ """ + + output_config: 'OutputStorageConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='OutputStorageConfig', + ) + + class PublishToPubSub(proto.Message): + r"""Publish a message into a given Pub/Sub topic when DlpJob has + completed. The message contains a single field, ``DlpJobName``, + which is equal to the finished job's + ```DlpJob.name`` `__. + Compatible with: Inspect, Risk + + Attributes: + topic (str): + Cloud Pub/Sub topic to send notifications to. + The topic must have given publishing access + rights to the DLP API service account executing + the long running DlpJob sending the + notifications. Format is + projects/{project}/topics/{topic}. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + + class PublishSummaryToCscc(proto.Message): + r"""Publish the result summary of a DlpJob to the Cloud Security + Command Center (CSCC Alpha). + This action is only available for projects which are parts of an + organization and whitelisted for the alpha Cloud Security + Command Center. + The action will publish the count of finding instances and their + info types. The summary of findings will be persisted in CSCC + and are governed by CSCC service-specific policy, see + https://cloud.google.com/terms/service-terms Only a single + instance of this action can be specified. Compatible with: + Inspect + + """ + + class PublishFindingsToCloudDataCatalog(proto.Message): + r"""Publish findings of a DlpJob to Data Catalog. In Data Catalog, tag + templates are applied to the resource that Cloud DLP scanned. Data + Catalog tag templates are stored in the same project and region + where the BigQuery table exists. For Cloud DLP to create and apply + the tag template, the Cloud DLP service agent must have the + ``roles/datacatalog.tagTemplateOwner`` permission on the project. + The tag template contains fields summarizing the results of the + DlpJob. Any field values previously written by another DlpJob are + deleted. 
[InfoType naming patterns][google.privacy.dlp.v2.InfoType] + are strictly enforced when using this feature. + + Findings are persisted in Data Catalog storage and are governed by + service-specific policies for Data Catalog. For more information, + see `Service Specific + Terms `__. + + Only a single instance of this action can be specified. This action + is allowed only if all resources being scanned are BigQuery tables. + Compatible with: Inspect + + """ + + class Deidentify(proto.Message): + r"""Create a de-identified copy of the requested table or files. + + A TransformationDetail will be created for each transformation. + + If any rows in BigQuery are skipped during de-identification + (transformation errors or row size exceeds BigQuery insert API + limits) they are placed in the failure output table. If the original + row exceeds the BigQuery insert API limit it will be truncated when + written to the failure output table. The failure output table can be + set in the + action.deidentify.output.big_query_output.deidentified_failure_output_table + field, if no table is set, a table will be automatically created in + the same project and dataset as the original table. + + Compatible with: Inspect + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transformation_config (google.cloud.dlp_v2.types.TransformationConfig): + User specified deidentify templates and + configs for structured, unstructured, and image + files. + transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): + Config for storing transformation details. This is separate + from the de-identified content, and contains metadata about + the successful transformations and/or failures that occurred + while de-identifying. 
This needs to be set in order for + users to access information about the status of each + transformation (see + [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] + message for more information about what is noted). + cloud_storage_output (str): + Required. User settable Cloud Storage bucket + and folders to store de-identified files. This + field must be set for cloud storage + deidentification. The output Cloud Storage + bucket must be different from the input bucket. + De-identified files will overwrite files in the + output path. + Form of: gs://bucket/folder/ or gs://bucket + + This field is a member of `oneof`_ ``output``. + file_types_to_transform (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of user-specified file type groups to transform. If + specified, only the files with these filetypes will be + transformed. If empty, all supported files will be + transformed. Supported types may be automatically added over + time. If a file type is set in this field that isn't + supported by the Deidentify action then the job will fail + and will not be successfully created/started. Currently the + only filetypes supported are: IMAGES, TEXT_FILES, CSV, TSV. + """ + + transformation_config: 'TransformationConfig' = proto.Field( + proto.MESSAGE, + number=7, + message='TransformationConfig', + ) + transformation_details_storage_config: 'TransformationDetailsStorageConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='TransformationDetailsStorageConfig', + ) + cloud_storage_output: str = proto.Field( + proto.STRING, + number=9, + oneof='output', + ) + file_types_to_transform: MutableSequence[storage.FileType] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=storage.FileType, + ) + + class JobNotificationEmails(proto.Message): + r"""Sends an email when the job completes. The email goes to IAM project + owners and technical `Essential + Contacts `__. 
+ + """ + + class PublishToStackdriver(proto.Message): + r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This + will publish a metric to stack driver on each infotype requested and + how many findings were found for it. CustomDetectors will be + bucketed as 'Custom' under the Stackdriver label 'info_type'. + + """ + + save_findings: SaveFindings = proto.Field( + proto.MESSAGE, + number=1, + oneof='action', + message=SaveFindings, + ) + pub_sub: PublishToPubSub = proto.Field( + proto.MESSAGE, + number=2, + oneof='action', + message=PublishToPubSub, + ) + publish_summary_to_cscc: PublishSummaryToCscc = proto.Field( + proto.MESSAGE, + number=3, + oneof='action', + message=PublishSummaryToCscc, + ) + publish_findings_to_cloud_data_catalog: PublishFindingsToCloudDataCatalog = proto.Field( + proto.MESSAGE, + number=5, + oneof='action', + message=PublishFindingsToCloudDataCatalog, + ) + deidentify: Deidentify = proto.Field( + proto.MESSAGE, + number=7, + oneof='action', + message=Deidentify, + ) + job_notification_emails: JobNotificationEmails = proto.Field( + proto.MESSAGE, + number=8, + oneof='action', + message=JobNotificationEmails, + ) + publish_to_stackdriver: PublishToStackdriver = proto.Field( + proto.MESSAGE, + number=9, + oneof='action', + message=PublishToStackdriver, + ) + + +class TransformationConfig(proto.Message): + r"""User specified templates and configs for how to deidentify + structured, unstructures, and image files. User must provide + either a unstructured deidentify template or at least one redact + image config. + + Attributes: + deidentify_template (str): + De-identify template. If this template is specified, it will + serve as the default de-identify template. This template + cannot contain ``record_transformations`` since it can be + used for unstructured content such as free-form text files. + If this template is not set, a default + ``ReplaceWithInfoTypeConfig`` will be used to de-identify + unstructured content. 
+ structured_deidentify_template (str): + Structured de-identify template. If this template is + specified, it will serve as the de-identify template for + structured content such as delimited files and tables. If + this template is not set but the ``deidentify_template`` is + set, then ``deidentify_template`` will also apply to the + structured content. If neither template is set, a default + ``ReplaceWithInfoTypeConfig`` will be used to de-identify + structured content. + image_redact_template (str): + Image redact template. + If this template is specified, it will serve as + the de-identify template for images. If this + template is not set, all findings in the image + will be redacted with a black box. + """ + + deidentify_template: str = proto.Field( + proto.STRING, + number=1, + ) + structured_deidentify_template: str = proto.Field( + proto.STRING, + number=2, + ) + image_redact_template: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CreateInspectTemplateRequest(proto.Message): + r"""Request message for CreateInspectTemplate. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + template_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInspectTemplateRequest(proto.Message): + r"""Request message for UpdateInspectTemplate. + + Attributes: + name (str): + Required. Resource name of organization and inspectTemplate + to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetInspectTemplateRequest(proto.Message): + r"""Request message for GetInspectTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListInspectTemplatesRequest(proto.Message): + r"""Request message for ListInspectTemplates. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListInspectTemplates``. + page_size (int): + Size of the page, can be limited by the + server. 
If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the template was + created. + - ``update_time``: corresponds to the time the template was + last updated. + - ``name``: corresponds to the template's name. + - ``display_name``: corresponds to the template's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInspectTemplatesResponse(proto.Message): + r"""Response message for ListInspectTemplates. + + Attributes: + inspect_templates (MutableSequence[google.cloud.dlp_v2.types.InspectTemplate]): + List of inspectTemplates, up to page_size in + ListInspectTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListInspectTemplates request. + """ + + @property + def raw_page(self): + return self + + inspect_templates: MutableSequence['InspectTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteInspectTemplateRequest(proto.Message): + r"""Request message for DeleteInspectTemplate. + + Attributes: + name (str): + Required. 
Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateJobTriggerRequest(proto.Message): + r"""Request message for CreateJobTrigger. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + trigger_id (str): + The trigger id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: 'JobTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + trigger_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ActivateJobTriggerRequest(proto.Message): + r"""Request message for ActivateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the trigger to activate, for + example ``projects/dlp-test-project/jobTriggers/53234423``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateJobTriggerRequest(proto.Message): + r"""Request message for UpdateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: 'JobTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetJobTriggerRequest(proto.Message): + r"""Request message for GetJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDlpJobRequest(proto.Message): + r"""Request message for CreateDlpJobRequest. Used to initiate + long running jobs such as calculating risk metrics or inspecting + Google Cloud Storage. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage repository + for InfoTypes. + + This field is a member of `oneof`_ ``job``. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a BigQuery + table. + + This field is a member of `oneof`_ ``job``. + job_id (str): + The job id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_job: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=2, + oneof='job', + message='InspectJobConfig', + ) + risk_job: 'RiskAnalysisJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='job', + message='RiskAnalysisJobConfig', + ) + job_id: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListJobTriggersRequest(proto.Message): + r"""Request message for ListJobTriggers. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ListJobTriggers. ``order_by`` field must not change for + subsequent calls. + page_size (int): + Size of the page, can be limited by a server. + order_by (str): + Comma separated list of triggeredJob fields to order by, + followed by ``asc`` or ``desc`` postfix. This list is + case-insensitive, default sorting order is ascending, + redundant space characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the JobTrigger + was created. + - ``update_time``: corresponds to the time the JobTrigger + was last updated. + - ``last_run_time``: corresponds to the last time the + JobTrigger ran. + - ``name``: corresponds to the JobTrigger's name. + - ``display_name``: corresponds to the JobTrigger's display + name. + - ``status``: corresponds to JobTrigger's status. + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. 
+ - Supported fields/values for inspect triggers: + + - ``status`` - HEALTHY|PAUSED|CANCELLED + - ``inspected_storage`` - + DATASTORE|CLOUD_STORAGE|BIGQUERY + - 'last_run_time\` - RFC 3339 formatted timestamp, + surrounded by quotation marks. Nanoseconds are + ignored. + - 'error_count' - Number of errors that have occurred + while running. + + - The operator must be ``=`` or ``!=`` for status and + inspected_storage. + + Examples: + + - inspected_storage = cloud_storage AND status = HEALTHY + - inspected_storage = cloud_storage OR inspected_storage = + bigquery + - inspected_storage = cloud_storage AND (state = PAUSED OR + state = HEALTHY) + - last_run_time > "2017-12-12T00:00:00+00:00" + + The length of this field should be no more than 500 + characters. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of jobs. Will use ``DlpJobType.INSPECT`` if not + set. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=6, + enum='DlpJobType', + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListJobTriggersResponse(proto.Message): + r"""Response message for ListJobTriggers. + + Attributes: + job_triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger]): + List of triggeredJobs, up to page_size in + ListJobTriggersRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListJobTriggers request. 
+ """ + + @property + def raw_page(self): + return self + + job_triggers: MutableSequence['JobTrigger'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='JobTrigger', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteJobTriggerRequest(proto.Message): + r"""Request message for DeleteJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InspectJobConfig(proto.Message): + r"""Controls what and how to inspect for findings. + + Attributes: + storage_config (google.cloud.dlp_v2.types.StorageConfig): + The data to scan. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + How and what to scan for. + inspect_template_name (str): + If provided, will be used as the default for all values in + InspectConfig. ``inspect_config`` will be merged into the + values persisted as part of the template. + actions (MutableSequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. + """ + + storage_config: storage.StorageConfig = proto.Field( + proto.MESSAGE, + number=1, + message=storage.StorageConfig, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=3, + ) + actions: MutableSequence['Action'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Action', + ) + + +class DataProfileAction(proto.Message): + r"""A task to execute when a data profile has been generated. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + export_data (google.cloud.dlp_v2.types.DataProfileAction.Export): + Export data profiles into a provided + location. + + This field is a member of `oneof`_ ``action``. + pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): + Publish a message into the Pub/Sub topic. + + This field is a member of `oneof`_ ``action``. + """ + class EventType(proto.Enum): + r"""Types of event that can trigger an action. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + Unused. + NEW_PROFILE (1): + New profile (not a re-profile). + CHANGED_PROFILE (2): + Changed one of the following profile metrics: + + - Table data risk score + - Table sensitivity score + - Table resource visibility + - Table encryption type + - Table predicted infoTypes + - Table other infoTypes + SCORE_INCREASED (3): + Table data risk score or sensitivity score + increased. + ERROR_CHANGED (4): + A user (non-internal) error occurred. + """ + EVENT_TYPE_UNSPECIFIED = 0 + NEW_PROFILE = 1 + CHANGED_PROFILE = 2 + SCORE_INCREASED = 3 + ERROR_CHANGED = 4 + + class Export(proto.Message): + r"""If set, the detailed data profiles will be persisted to the + location of your choice whenever updated. + + Attributes: + profile_table (google.cloud.dlp_v2.types.BigQueryTable): + Store all table and column profiles in an + existing table or a new table in an existing + dataset. Each re-generation will result in a new + row in BigQuery. + """ + + profile_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + message=storage.BigQueryTable, + ) + + class PubSubNotification(proto.Message): + r"""Send a Pub/Sub message into the given Pub/Sub topic to connect other + systems to data profile generation. The message payload data will be + the byte serialization of ``DataProfilePubSubMessage``. + + Attributes: + topic (str): + Cloud Pub/Sub topic to send notifications to. 
+ Format is projects/{project}/topics/{topic}. + event (google.cloud.dlp_v2.types.DataProfileAction.EventType): + The type of event that triggers a Pub/Sub. At most one + ``PubSubNotification`` per EventType is permitted. + pubsub_condition (google.cloud.dlp_v2.types.DataProfilePubSubCondition): + Conditions (e.g., data risk or sensitivity + level) for triggering a Pub/Sub. + detail_of_message (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification.DetailLevel): + How much data to include in the Pub/Sub message. If the user + wishes to limit the size of the message, they can use + resource_name and fetch the profile fields they wish to. Per + table profile (not per column). + """ + class DetailLevel(proto.Enum): + r"""The levels of detail that can be included in the Pub/Sub + message. + + Values: + DETAIL_LEVEL_UNSPECIFIED (0): + Unused. + TABLE_PROFILE (1): + The full table data profile. + RESOURCE_NAME (2): + The resource name of the table. + """ + DETAIL_LEVEL_UNSPECIFIED = 0 + TABLE_PROFILE = 1 + RESOURCE_NAME = 2 + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + event: 'DataProfileAction.EventType' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileAction.EventType', + ) + pubsub_condition: 'DataProfilePubSubCondition' = proto.Field( + proto.MESSAGE, + number=3, + message='DataProfilePubSubCondition', + ) + detail_of_message: 'DataProfileAction.PubSubNotification.DetailLevel' = proto.Field( + proto.ENUM, + number=4, + enum='DataProfileAction.PubSubNotification.DetailLevel', + ) + + export_data: Export = proto.Field( + proto.MESSAGE, + number=1, + oneof='action', + message=Export, + ) + pub_sub_notification: PubSubNotification = proto.Field( + proto.MESSAGE, + number=2, + oneof='action', + message=PubSubNotification, + ) + + +class DataProfileJobConfig(proto.Message): + r"""Configuration for setting up a job to scan resources for profile + generation. 
Only one data profile configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to the [data + retention policy] + (https://cloud.google.com/dlp/docs/data-profiles#retention). + + Attributes: + location (google.cloud.dlp_v2.types.DataProfileLocation): + The data to scan. + project_id (str): + The project that will run the scan. The DLP + service account that exists within this project + must have access to all resources that are + profiled, and the Cloud DLP API must be enabled. + inspect_templates (MutableSequence[str]): + Detection logic for profile generation. + + Not all template features are used by profiles. + FindingLimits, include_quote and exclude_info_types have no + impact on data profiling. + + Multiple templates may be provided if there is data in + multiple regions. At most one template must be specified + per-region (including "global"). Each region is scanned + using the applicable template. If no region-specific + template is specified, but a "global" template is specified, + it will be copied to that region and used instead. If no + global or region-specific template is provided for a region + with data, that region's data will not be scanned. + + For more information, see + https://cloud.google.com/dlp/docs/data-profiles#data_residency. + data_profile_actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): + Actions to execute at the completion of the + job. + """ + + location: 'DataProfileLocation' = proto.Field( + proto.MESSAGE, + number=1, + message='DataProfileLocation', + ) + project_id: str = proto.Field( + proto.STRING, + number=5, + ) + inspect_templates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + data_profile_actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='DataProfileAction', + ) + + +class DataProfileLocation(proto.Message): + r"""The data that will be profiled. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + organization_id (int): + The ID of an organization to scan. + + This field is a member of `oneof`_ ``location``. + folder_id (int): + The ID of the Folder within an organization + to scan. + + This field is a member of `oneof`_ ``location``. + """ + + organization_id: int = proto.Field( + proto.INT64, + number=1, + oneof='location', + ) + folder_id: int = proto.Field( + proto.INT64, + number=2, + oneof='location', + ) + + +class DlpJob(proto.Message): + r"""Combines all of the information about a DLP job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The server-assigned name. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. + state (google.cloud.dlp_v2.types.DlpJob.JobState): + State of a job. + risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): + Results from analyzing risk of a data source. + + This field is a member of `oneof`_ ``details``. + inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): + Results from inspecting a data source. + + This field is a member of `oneof`_ ``details``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job finished. 
+ job_trigger_name (str): + If created by a job trigger, the resource + name of the trigger that instantiated the job. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + A stream of errors encountered running the + job. + """ + class JobState(proto.Enum): + r"""Possible states of a job. New items may be added. + + Values: + JOB_STATE_UNSPECIFIED (0): + Unused. + PENDING (1): + The job has not yet started. + RUNNING (2): + The job is currently running. Once a job has + finished it will transition to FAILED or DONE. + DONE (3): + The job is no longer running. + CANCELED (4): + The job was canceled before it could be + completed. + FAILED (5): + The job had an error and did not complete. + ACTIVE (6): + The job is currently accepting findings via + hybridInspect. A hybrid job in ACTIVE state may + continue to have findings added to it through + the calling of hybridInspect. After the job has + finished no more calls to hybridInspect may be + made. ACTIVE jobs can transition to DONE. 
+ """ + JOB_STATE_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + DONE = 3 + CANCELED = 4 + FAILED = 5 + ACTIVE = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=2, + enum='DlpJobType', + ) + state: JobState = proto.Field( + proto.ENUM, + number=3, + enum=JobState, + ) + risk_details: 'AnalyzeDataSourceRiskDetails' = proto.Field( + proto.MESSAGE, + number=4, + oneof='details', + message='AnalyzeDataSourceRiskDetails', + ) + inspect_details: 'InspectDataSourceDetails' = proto.Field( + proto.MESSAGE, + number=5, + oneof='details', + message='InspectDataSourceDetails', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + job_trigger_name: str = proto.Field( + proto.STRING, + number=10, + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message='Error', + ) + + +class GetDlpJobRequest(proto.Message): + r"""The request message for [DlpJobs.GetDlpJob][]. + + Attributes: + name (str): + Required. The name of the DlpJob resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDlpJobsRequest(proto.Message): + r"""The request message for listing DLP jobs. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values for inspect jobs: + + - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED + - ``inspected_storage`` - + DATASTORE|CLOUD_STORAGE|BIGQUERY + - ``trigger_name`` - The name of the trigger that + created the job. + - 'end_time\` - Corresponds to the time the job + finished. + - 'start_time\` - Corresponds to the time the job + finished. + + - Supported fields for risk analysis jobs: + + - ``state`` - RUNNING|CANCELED|FINISHED|FAILED + - 'end_time\` - Corresponds to the time the job + finished. + - 'start_time\` - Corresponds to the time the job + finished. + + - The operator must be ``=`` or ``!=``. + + Examples: + + - inspected_storage = cloud_storage AND state = done + - inspected_storage = cloud_storage OR inspected_storage = + bigquery + - inspected_storage = cloud_storage AND (state = done OR + state = canceled) + - end_time > "2017-12-12T00:00:00+00:00" + + The length of this field should be no more than 500 + characters. + page_size (int): + The standard list page size. + page_token (str): + The standard list page token. 
+ type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. Defaults to ``DlpJobType.INSPECT`` + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, end_time asc, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the job was + created. + - ``end_time``: corresponds to the time the job ended. + - ``name``: corresponds to the job's name. + - ``state``: corresponds to ``state`` + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=5, + enum='DlpJobType', + ) + order_by: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListDlpJobsResponse(proto.Message): + r"""The response message for listing DLP jobs. + + Attributes: + jobs (MutableSequence[google.cloud.dlp_v2.types.DlpJob]): + A list of DlpJobs that matches the specified + filter in the request. + next_page_token (str): + The standard List next-page token. + """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence['DlpJob'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DlpJob', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelDlpJobRequest(proto.Message): + r"""The request message for canceling a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FinishDlpJobRequest(proto.Message): + r"""The request message for finishing a DLP hybrid job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteDlpJobRequest(proto.Message): + r"""The request message for deleting a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDeidentifyTemplateRequest(proto.Message): + r"""Request message for CreateDeidentifyTemplate. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Required. The DeidentifyTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. 
This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template: 'DeidentifyTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyTemplate', + ) + template_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateDeidentifyTemplateRequest(proto.Message): + r"""Request message for UpdateDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template: 'DeidentifyTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyTemplate', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetDeidentifyTemplateRequest(proto.Message): + r"""Request message for GetDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeidentifyTemplatesRequest(proto.Message): + r"""Request message for ListDeidentifyTemplates. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListDeidentifyTemplates``. + page_size (int): + Size of the page, can be limited by the + server. If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the template was + created. + - ``update_time``: corresponds to the time the template was + last updated. + - ``name``: corresponds to the template's name. + - ``display_name``: corresponds to the template's display + name. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDeidentifyTemplatesResponse(proto.Message): + r"""Response message for ListDeidentifyTemplates. + + Attributes: + deidentify_templates (MutableSequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): + List of deidentify templates, up to page_size in + ListDeidentifyTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListDeidentifyTemplates request. + """ + + @property + def raw_page(self): + return self + + deidentify_templates: MutableSequence['DeidentifyTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DeidentifyTemplate', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteDeidentifyTemplateRequest(proto.Message): + r"""Request message for DeleteDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LargeCustomDictionaryConfig(proto.Message): + r"""Configuration for a custom dictionary created from a data source of + any size up to the maximum size defined in the + `limits `__ page. The artifacts + of dictionary creation are stored in the specified Cloud Storage + location. Consider using ``CustomInfoType.Dictionary`` for smaller + dictionaries that satisfy the size requirements. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + output_path (google.cloud.dlp_v2.types.CloudStoragePath): + Location to store dictionary artifacts in + Cloud Storage. These files will only be + accessible by project owners and the DLP API. If + any of these artifacts are modified, the + dictionary is considered invalid and can no + longer be used. + cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): + Set of files containing newline-delimited + lists of dictionary phrases. + + This field is a member of `oneof`_ ``source``. + big_query_field (google.cloud.dlp_v2.types.BigQueryField): + Field in a BigQuery table where each cell + represents a dictionary phrase. + + This field is a member of `oneof`_ ``source``. + """ + + output_path: storage.CloudStoragePath = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CloudStoragePath, + ) + cloud_storage_file_set: storage.CloudStorageFileSet = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message=storage.CloudStorageFileSet, + ) + big_query_field: storage.BigQueryField = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message=storage.BigQueryField, + ) + + +class LargeCustomDictionaryStats(proto.Message): + r"""Summary statistics of a custom dictionary. + + Attributes: + approx_num_phrases (int): + Approximate number of distinct phrases in the + dictionary. + """ + + approx_num_phrases: int = proto.Field( + proto.INT64, + number=1, + ) + + +class StoredInfoTypeConfig(proto.Message): + r"""Configuration for stored infoTypes. All fields and subfield + are provided by the user. For more information, see + https://cloud.google.com/dlp/docs/creating-custom-infotypes. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + Display name of the StoredInfoType (max 256 + characters). + description (str): + Description of the StoredInfoType (max 256 + characters). + large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): + StoredInfoType where findings are defined by + a dictionary of phrases. + + This field is a member of `oneof`_ ``type``. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Store dictionary-based CustomInfoType. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Store regular expression-based + StoredInfoType. + + This field is a member of `oneof`_ ``type``. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + large_custom_dictionary: 'LargeCustomDictionaryConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='LargeCustomDictionaryConfig', + ) + dictionary: storage.CustomInfoType.Dictionary = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + + +class StoredInfoTypeStats(proto.Message): + r"""Statistics for a StoredInfoType. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): + StoredInfoType where findings are defined by + a dictionary of phrases. + + This field is a member of `oneof`_ ``type``. 
+ """ + + large_custom_dictionary: 'LargeCustomDictionaryStats' = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='LargeCustomDictionaryStats', + ) + + +class StoredInfoTypeVersion(proto.Message): + r"""Version of a StoredInfoType, including the configuration used + to build it, create timestamp, and current state. + + Attributes: + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + StoredInfoType configuration. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Create timestamp of the version. Read-only, + determined by the system when the version is + created. + state (google.cloud.dlp_v2.types.StoredInfoTypeState): + Stored info type version state. Read-only, + updated by the system during dictionary + creation. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Errors that occurred when creating this storedInfoType + version, or anomalies detected in the storedInfoType data + that render it unusable. Only the five most recent errors + will be displayed, with the most recent error appearing + first. + + For example, some of the data for stored custom dictionaries + is put in the user's Cloud Storage bucket, and if this data + is modified or deleted by the user or another system, the + dictionary becomes invalid. + + If any errors occur, fix the problem indicated by the error + message and use the UpdateStoredInfoType API method to + create another version of the storedInfoType to continue + using it, reusing the same ``config`` if it was not the + source of the error. + stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): + Statistics about this storedInfoType version. 
+ """ + + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='StoredInfoTypeConfig', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'StoredInfoTypeState' = proto.Field( + proto.ENUM, + number=3, + enum='StoredInfoTypeState', + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Error', + ) + stats: 'StoredInfoTypeStats' = proto.Field( + proto.MESSAGE, + number=5, + message='StoredInfoTypeStats', + ) + + +class StoredInfoType(proto.Message): + r"""StoredInfoType resource message that contains information + about the current version and any pending updates. + + Attributes: + name (str): + Resource name. + current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): + Current version of the stored info type. + pending_versions (MutableSequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): + Pending versions of the stored info type. + Empty if no versions are pending. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + current_version: 'StoredInfoTypeVersion' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeVersion', + ) + pending_versions: MutableSequence['StoredInfoTypeVersion'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StoredInfoTypeVersion', + ) + + +class CreateStoredInfoTypeRequest(proto.Message): + r"""Request message for CreateStoredInfoType. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Required. Configuration of the storedInfoType + to create. + stored_info_type_id (str): + The storedInfoType ID can contain uppercase and lowercase + letters, numbers, and hyphens; that is, it must match the + regular expression: ``[a-zA-Z\d-_]+``. The maximum length is + 100 characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + stored_info_type_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateStoredInfoTypeRequest(proto.Message): + r"""Request message for UpdateStoredInfoType. + + Attributes: + name (str): + Required. Resource name of organization and storedInfoType + to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. 
+ config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the storedInfoType. + If not provided, a new version of the + storedInfoType will be created with the existing + configuration. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetStoredInfoTypeRequest(proto.Message): + r"""Request message for GetStoredInfoType. + + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListStoredInfoTypesRequest(proto.Message): + r"""Request message for ListStoredInfoTypes. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListStoredInfoTypes``. + page_size (int): + Size of the page, can be limited by the + server. 
If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, display_name, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the most recent + version of the resource was created. + - ``state``: corresponds to the state of the resource. + - ``name``: corresponds to resource name. + - ``display_name``: corresponds to info type's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListStoredInfoTypesResponse(proto.Message): + r"""Response message for ListStoredInfoTypes. + + Attributes: + stored_info_types (MutableSequence[google.cloud.dlp_v2.types.StoredInfoType]): + List of storedInfoTypes, up to page_size in + ListStoredInfoTypesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListStoredInfoTypes request. + """ + + @property + def raw_page(self): + return self + + stored_info_types: MutableSequence['StoredInfoType'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='StoredInfoType', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteStoredInfoTypeRequest(proto.Message): + r"""Request message for DeleteStoredInfoType. + + Attributes: + name (str): + Required. 
Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class HybridInspectJobTriggerRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the trigger to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: 'HybridContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridInspectDlpJobRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: 'HybridContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridContentItem(proto.Message): + r"""An individual hybrid item to inspect. Will be stored + temporarily during processing. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): + Supplementary information that will be added + to each finding. 
+ """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + finding_details: 'HybridFindingDetails' = proto.Field( + proto.MESSAGE, + number=2, + message='HybridFindingDetails', + ) + + +class HybridFindingDetails(proto.Message): + r"""Populate to associate additional data with each finding. + + Attributes: + container_details (google.cloud.dlp_v2.types.Container): + Details about the container where the content + being inspected is from. + file_offset (int): + Offset in bytes of the line, from the + beginning of the file, where the finding is + located. Populate if the item being scanned is + only part of a bigger item, such as a shard of a + file and you want to track the absolute position + of the finding. + row_offset (int): + Offset of the row for tables. Populate if the + row(s) being scanned are part of a bigger + dataset and you want to keep track of their + absolute position. + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional information to make + findings meaningful such as the columns that are primary + keys. If not known ahead of time, can also be set within + each inspect hybrid call and the two will be merged. Note + that identifying_fields will only be stored to BigQuery, and + only if the BigQuery action has been included. + labels (MutableMapping[str, str]): + Labels to represent user provided metadata about the data + being inspected. If configured by the job, some key values + may be required. The labels associated with ``Finding``'s + produced by hybrid inspection. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. 
+ + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + """ + + container_details: 'Container' = proto.Field( + proto.MESSAGE, + number=1, + message='Container', + ) + file_offset: int = proto.Field( + proto.INT64, + number=2, + ) + row_offset: int = proto.Field( + proto.INT64, + number=3, + ) + table_options: storage.TableOptions = proto.Field( + proto.MESSAGE, + number=4, + message=storage.TableOptions, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class HybridInspectResponse(proto.Message): + r"""Quota exceeded errors will be thrown once quota has been met. + """ + + +class DataRiskLevel(proto.Message): + r"""Score is a summary of all elements in the data profile. + A higher number means more risk. + + Attributes: + score (google.cloud.dlp_v2.types.DataRiskLevel.DataRiskLevelScore): + The score applied to the resource. + """ + class DataRiskLevelScore(proto.Enum): + r"""Various score levels for resources. + + Values: + RISK_SCORE_UNSPECIFIED (0): + Unused. + RISK_LOW (10): + Low risk - Lower indication of sensitive data + that appears to have additional access + restrictions in place or no indication of + sensitive data found. + RISK_MODERATE (20): + Medium risk - Sensitive data may be present + but additional access or fine grain access + restrictions appear to be present. Consider + limiting access even further or transform data + to mask. + RISK_HIGH (30): + High risk – SPII may be present. Access + controls may include public ACLs. Exfiltration + of data may lead to user data loss. + Re-identification of users may be possible. + Consider limiting usage and or removing SPII. 
+ """ + RISK_SCORE_UNSPECIFIED = 0 + RISK_LOW = 10 + RISK_MODERATE = 20 + RISK_HIGH = 30 + + score: DataRiskLevelScore = proto.Field( + proto.ENUM, + number=1, + enum=DataRiskLevelScore, + ) + + +class DataProfileConfigSnapshot(proto.Message): + r"""Snapshot of the configurations used to generate the profile. + + Attributes: + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + A copy of the inspection config used to generate this + profile. This is a copy of the inspect_template specified in + ``DataProfileJobConfig``. + data_profile_job (google.cloud.dlp_v2.types.DataProfileJobConfig): + A copy of the configuration used to generate + this profile. + """ + + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + data_profile_job: 'DataProfileJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='DataProfileJobConfig', + ) + + +class TableDataProfile(proto.Message): + r"""The profile for a scanned table. + + Attributes: + name (str): + The name of the profile. + project_data_profile (str): + The resource name to the project data profile + for this table. + dataset_project_id (str): + The GCP project ID that owns the BigQuery + dataset. + dataset_location (str): + The BigQuery location where the dataset's + data is stored. See + https://cloud.google.com/bigquery/docs/locations + for supported locations. + dataset_id (str): + The BigQuery dataset ID. + table_id (str): + The BigQuery table ID. + full_resource (str): + The resource name of the table. + https://cloud.google.com/apis/design/resource_names#full_resource_name + profile_status (google.cloud.dlp_v2.types.ProfileStatus): + Success or error status from the most recent + profile generation attempt. May be empty if the + profile is still being generated. + state (google.cloud.dlp_v2.types.TableDataProfile.State): + State of a profile. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The sensitivity score of this table. 
+ data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): + The data risk level of this table. + predicted_info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSummary]): + The infoTypes predicted from this table's + data. + other_info_types (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): + Other infoTypes found in this table's data. + config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): + The snapshot of the configurations used to + generate the profile. + last_modified_time (google.protobuf.timestamp_pb2.Timestamp): + The time when this table was last modified + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when this table expires. + scanned_column_count (int): + The number of columns profiled in the table. + failed_column_count (int): + The number of columns skipped in the table + because of an error. + table_size_bytes (int): + The size of the table when the profile was + generated. + row_count (int): + Number of rows in the table when the profile + was generated. This will not be populated for + BigLake tables. + encryption_status (google.cloud.dlp_v2.types.EncryptionStatus): + How the table is encrypted. + resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): + How broadly a resource has been shared. + profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): + The last time the profile was generated. + resource_labels (MutableMapping[str, str]): + The labels applied to the resource at the + time the profile was generated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the table was created. + """ + class State(proto.Enum): + r"""Possible states of a profile. New items may be added. + + Values: + STATE_UNSPECIFIED (0): + Unused. + RUNNING (1): + The profile is currently running. Once a + profile has finished it will transition to DONE. + DONE (2): + The profile is no longer generating. 
If + profile_status.status.code is 0, the profile succeeded, + otherwise, it failed. + """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + DONE = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + project_data_profile: str = proto.Field( + proto.STRING, + number=2, + ) + dataset_project_id: str = proto.Field( + proto.STRING, + number=24, + ) + dataset_location: str = proto.Field( + proto.STRING, + number=29, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=25, + ) + table_id: str = proto.Field( + proto.STRING, + number=26, + ) + full_resource: str = proto.Field( + proto.STRING, + number=3, + ) + profile_status: 'ProfileStatus' = proto.Field( + proto.MESSAGE, + number=21, + message='ProfileStatus', + ) + state: State = proto.Field( + proto.ENUM, + number=22, + enum=State, + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=5, + message=storage.SensitivityScore, + ) + data_risk_level: 'DataRiskLevel' = proto.Field( + proto.MESSAGE, + number=6, + message='DataRiskLevel', + ) + predicted_info_types: MutableSequence['InfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message='InfoTypeSummary', + ) + other_info_types: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=28, + message='OtherInfoTypeSummary', + ) + config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( + proto.MESSAGE, + number=7, + message='DataProfileConfigSnapshot', + ) + last_modified_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + scanned_column_count: int = proto.Field( + proto.INT64, + number=10, + ) + failed_column_count: int = proto.Field( + proto.INT64, + number=11, + ) + table_size_bytes: int = proto.Field( + proto.INT64, + number=12, + ) + row_count: int = proto.Field( + proto.INT64, + 
number=13, + ) + encryption_status: 'EncryptionStatus' = proto.Field( + proto.ENUM, + number=14, + enum='EncryptionStatus', + ) + resource_visibility: 'ResourceVisibility' = proto.Field( + proto.ENUM, + number=15, + enum='ResourceVisibility', + ) + profile_last_generated: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=17, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=23, + message=timestamp_pb2.Timestamp, + ) + + +class ProfileStatus(proto.Message): + r""" + + Attributes: + status (google.rpc.status_pb2.Status): + Profiling status code and optional message + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Time when the profile generation status was + updated + """ + + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class InfoTypeSummary(proto.Message): + r"""The infoType details for this column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The infoType. + estimated_prevalence (int): + Not populated for predicted infotypes. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + + +class OtherInfoTypeSummary(proto.Message): + r"""Infotype details for other infoTypes found within a column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The other infoType. + estimated_prevalence (int): + Approximate percentage of non-null rows that + contained data detected by this infotype. 
+ """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + + +class DataProfilePubSubCondition(proto.Message): + r"""A condition for determining whether a Pub/Sub should be + triggered. + + Attributes: + expressions (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions): + An expression. + """ + class ProfileScoreBucket(proto.Enum): + r"""Various score levels for resources. + + Values: + PROFILE_SCORE_BUCKET_UNSPECIFIED (0): + Unused. + HIGH (1): + High risk/sensitivity detected. + MEDIUM_OR_HIGH (2): + Medium or high risk/sensitivity detected. + """ + PROFILE_SCORE_BUCKET_UNSPECIFIED = 0 + HIGH = 1 + MEDIUM_OR_HIGH = 2 + + class PubSubCondition(proto.Message): + r"""A condition consisting of a value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + minimum_risk_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum data risk score that triggers the + condition. + + This field is a member of `oneof`_ ``value``. + minimum_sensitivity_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum sensitivity level that triggers + the condition. + + This field is a member of `oneof`_ ``value``. 
+ """ + + minimum_risk_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( + proto.ENUM, + number=1, + oneof='value', + enum='DataProfilePubSubCondition.ProfileScoreBucket', + ) + minimum_sensitivity_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( + proto.ENUM, + number=2, + oneof='value', + enum='DataProfilePubSubCondition.ProfileScoreBucket', + ) + + class PubSubExpressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + Attributes: + logical_operator (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator): + The operator to apply to the collection of + conditions. + conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubCondition]): + Conditions to apply to the expression. + """ + class PubSubLogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused. + OR (1): + Conditional OR. + AND (2): + Conditional AND. + """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + OR = 1 + AND = 2 + + logical_operator: 'DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator', + ) + conditions: MutableSequence['DataProfilePubSubCondition.PubSubCondition'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='DataProfilePubSubCondition.PubSubCondition', + ) + + expressions: PubSubExpressions = proto.Field( + proto.MESSAGE, + number=1, + message=PubSubExpressions, + ) + + +class DataProfilePubSubMessage(proto.Message): + r"""Pub/Sub topic message for a + DataProfileAction.PubSubNotification event. To receive a message + of protocol buffer schema type, convert the message data to an + object of this proto class. 
+ + Attributes: + profile (google.cloud.dlp_v2.types.TableDataProfile): + If ``DetailLevel`` is ``TABLE_PROFILE`` this will be fully + populated. Otherwise, if ``DetailLevel`` is + ``RESOURCE_NAME``, then only ``name`` and ``full_resource`` + will be populated. + event (google.cloud.dlp_v2.types.DataProfileAction.EventType): + The event that caused the Pub/Sub message to + be sent. + """ + + profile: 'TableDataProfile' = proto.Field( + proto.MESSAGE, + number=1, + message='TableDataProfile', + ) + event: 'DataProfileAction.EventType' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileAction.EventType', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py new file mode 100644 index 00000000..fdb81846 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py @@ -0,0 +1,1474 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'Likelihood', + 'FileType', + 'InfoType', + 'SensitivityScore', + 'StoredType', + 'CustomInfoType', + 'FieldId', + 'PartitionId', + 'KindExpression', + 'DatastoreOptions', + 'CloudStorageRegexFileSet', + 'CloudStorageOptions', + 'CloudStorageFileSet', + 'CloudStoragePath', + 'BigQueryOptions', + 'StorageConfig', + 'HybridOptions', + 'BigQueryKey', + 'DatastoreKey', + 'Key', + 'RecordKey', + 'BigQueryTable', + 'BigQueryField', + 'EntityId', + 'TableOptions', + }, +) + + +class Likelihood(proto.Enum): + r"""Categorization of results based on how likely they are to + represent a match, based on the number of elements they contain + which imply a match. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Default value; same as POSSIBLE. + VERY_UNLIKELY (1): + Few matching elements. + UNLIKELY (2): + No description available. + POSSIBLE (3): + Some matching elements. + LIKELY (4): + No description available. + VERY_LIKELY (5): + Many matching elements. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class FileType(proto.Enum): + r"""Definitions of file type groups to scan. New types will be + added to this list. + + Values: + FILE_TYPE_UNSPECIFIED (0): + Includes all files. + BINARY_FILE (1): + Includes all file extensions not covered by another entry. + Binary scanning attempts to convert the content of the file + to utf_8 to scan the file. If you wish to avoid this fall + back, specify one or more of the other FileType's in your + storage scan. 
+ TEXT_FILE (2): + Included file extensions: + asc, asp, aspx, brf, c, cc, cfm, cgi, cpp, csv, + cxx, c++, cs, css, dart, dat, dot, eml, + epub, ged, go, h, hh, hpp, hxx, h++, hs, html, + htm, mkd, markdown, m, ml, mli, perl, pl, + plist, pm, php, phtml, pht, properties, py, + pyw, rb, rbw, rs, rss, rc, scala, sh, sql, + swift, tex, shtml, shtm, xhtml, lhs, ics, ini, + java, js, json, kix, kml, ocaml, md, txt, + text, tsv, vb, vcard, vcs, wml, xcodeproj, xml, + xsl, xsd, yml, yaml. + IMAGE (3): + Included file extensions: bmp, gif, jpg, jpeg, jpe, png. + bytes_limit_per_file has no effect on image files. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + WORD (5): + Word files >30 MB will be scanned as binary + files. Included file extensions: + docx, dotx, docm, dotm + PDF (6): + PDF files >30 MB will be scanned as binary + files. Included file extensions: + pdf + AVRO (7): + Included file extensions: + avro + CSV (8): + Included file extensions: + csv + TSV (9): + Included file extensions: + tsv + POWERPOINT (11): + Powerpoint files >30 MB will be scanned as + binary files. Included file extensions: + pptx, pptm, potx, potm, pot + EXCEL (12): + Excel files >30 MB will be scanned as binary + files. Included file extensions: + xlsx, xlsm, xltx, xltm + """ + FILE_TYPE_UNSPECIFIED = 0 + BINARY_FILE = 1 + TEXT_FILE = 2 + IMAGE = 3 + WORD = 5 + PDF = 6 + AVRO = 7 + CSV = 8 + TSV = 9 + POWERPOINT = 11 + EXCEL = 12 + + +class InfoType(proto.Message): + r"""Type of information detected by the API. + + Attributes: + name (str): + Name of the information type. Either a name of your choosing + when creating a CustomInfoType, or one of the names listed + at https://cloud.google.com/dlp/docs/infotypes-reference + when specifying a built-in type. When sending Cloud DLP + results to Data Catalog, infoType names should conform to + the pattern ``[A-Za-z0-9$_-]{1,64}``. + version (str): + Optional version name for this InfoType. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SensitivityScore(proto.Message): + r"""Score is a summary of all elements in the data profile. + A higher number means more sensitive. + + Attributes: + score (google.cloud.dlp_v2.types.SensitivityScore.SensitivityScoreLevel): + The score applied to the resource. + """ + class SensitivityScoreLevel(proto.Enum): + r"""Various score levels for resources. + + Values: + SENSITIVITY_SCORE_UNSPECIFIED (0): + Unused. + SENSITIVITY_LOW (10): + No sensitive information detected. Limited + access. + SENSITIVITY_MODERATE (20): + Medium risk - PII, potentially sensitive + data, or fields with free-text data that are at + higher risk of having intermittent sensitive + data. Consider limiting access. + SENSITIVITY_HIGH (30): + High risk – SPII may be present. Exfiltration + of data may lead to user data loss. + Re-identification of users may be possible. + Consider limiting usage and or removing SPII. + """ + SENSITIVITY_SCORE_UNSPECIFIED = 0 + SENSITIVITY_LOW = 10 + SENSITIVITY_MODERATE = 20 + SENSITIVITY_HIGH = 30 + + score: SensitivityScoreLevel = proto.Field( + proto.ENUM, + number=1, + enum=SensitivityScoreLevel, + ) + + +class StoredType(proto.Message): + r"""A reference to a StoredInfoType to use with scanning. + + Attributes: + name (str): + Resource name of the requested ``StoredInfoType``, for + example + ``organizations/433245324/storedInfoTypes/432452342`` or + ``projects/project-id/storedInfoTypes/432452342``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp indicating when the version of the + ``StoredInfoType`` used for inspection was created. + Output-only field, populated by the system. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class CustomInfoType(proto.Message): + r"""Custom information type provided by the user. Used to find + domain-specific sensitive information configurable to the data + in question. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + CustomInfoType can either be a new infoType, or an extension + of built-in infoType, when the name matches one of existing + infoTypes and that infoType is specified in + ``InspectContent.info_types`` field. Specifying the latter + adds findings to the one detected by the system. If built-in + info type is not specified in ``InspectContent.info_types`` + list then the name is treated as a custom info type. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Likelihood to return for this CustomInfoType. This base + value can be altered by a detection rule if the finding + meets the criteria specified by the rule. Defaults to + ``VERY_LIKELY`` if not specified. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + A list of phrases to detect as a + CustomInfoType. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression based CustomInfoType. + + This field is a member of `oneof`_ ``type``. + surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): + Message for detecting output from + deidentification transformations that support + reversing. + + This field is a member of `oneof`_ ``type``. 
+ stored_type (google.cloud.dlp_v2.types.StoredType): + Load an existing ``StoredInfoType`` resource for use in + ``InspectDataSource``. Not currently supported in + ``InspectContent``. + + This field is a member of `oneof`_ ``type``. + detection_rules (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): + Set of detection rules to apply to all findings of this + CustomInfoType. Rules are applied in order that they are + specified. Not supported for the ``surrogate_type`` + CustomInfoType. + exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): + If set to EXCLUSION_TYPE_EXCLUDE this infoType will not + cause a finding to be returned. It still can be used for + rules matching. + """ + class ExclusionType(proto.Enum): + r""" + + Values: + EXCLUSION_TYPE_UNSPECIFIED (0): + A finding of this custom info type will not + be excluded from results. + EXCLUSION_TYPE_EXCLUDE (1): + A finding of this custom info type will be + excluded from final results, but can still + affect rule execution. + """ + EXCLUSION_TYPE_UNSPECIFIED = 0 + EXCLUSION_TYPE_EXCLUDE = 1 + + class Dictionary(proto.Message): + r"""Custom information type based on a dictionary of words or phrases. + This can be used to match sensitive information specific to the + data, such as a list of employee IDs or job titles. + + Dictionary words are case-insensitive and all characters other than + letters and digits in the unicode `Basic Multilingual + Plane `__ + will be replaced with whitespace when scanning for matches, so the + dictionary phrase "Sam Johnson" will match all three phrases "sam + johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the + characters surrounding any match must be of a different type than + the adjacent characters within the word, so letters must be next to + non-letters and digits next to non-digits. 
For example, the + dictionary word "jen" will match the first three letters of the text + "jen123" but will return no matches for "jennifer". + + Dictionary words containing a large number of characters that are + not letters or digits may result in unexpected findings because such + characters are treated as whitespace. The + `limits `__ page contains + details about the size limits of dictionaries. For dictionaries that + do not fit within these constraints, consider using + ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): + List of words or phrases to search for. + + This field is a member of `oneof`_ ``source``. + cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): + Newline-delimited file of words in Cloud + Storage. Only a single file is accepted. + + This field is a member of `oneof`_ ``source``. + """ + + class WordList(proto.Message): + r"""Message defining a list of words or phrases to search for in + the data. + + Attributes: + words (MutableSequence[str]): + Words or phrases defining the dictionary. The dictionary + must contain at least one phrase and every phrase must + contain at least 2 characters that are letters or digits. 
+ [required] + """ + + words: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + word_list: 'CustomInfoType.Dictionary.WordList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='source', + message='CustomInfoType.Dictionary.WordList', + ) + cloud_storage_path: 'CloudStoragePath' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='CloudStoragePath', + ) + + class Regex(proto.Message): + r"""Message defining a custom regular expression. + + Attributes: + pattern (str): + Pattern defining the regular expression. Its + syntax + (https://github.com/google/re2/wiki/Syntax) can + be found under the google/re2 repository on + GitHub. + group_indexes (MutableSequence[int]): + The index of the submatch to extract as + findings. When not specified, the entire match + is returned. No more than 3 may be included. + """ + + pattern: str = proto.Field( + proto.STRING, + number=1, + ) + group_indexes: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + + class SurrogateType(proto.Message): + r"""Message for detecting output from deidentification transformations + such as + ```CryptoReplaceFfxFpeConfig`` `__. + These types of transformations are those that perform + pseudonymization, thereby producing a "surrogate" as output. This + should be used in conjunction with a field on the transformation + such as ``surrogate_info_type``. This CustomInfoType does not + support the use of ``detection_rules``. + + """ + + class DetectionRule(proto.Message): + r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a + ``CustomInfoType`` to alter behavior under certain circumstances, + depending on the specific details of the rule. Not supported for the + ``surrogate_type`` custom infoType. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + + This field is a member of `oneof`_ ``type``. + """ + + class Proximity(proto.Message): + r"""Message for specifying a window around a finding to apply a + detection rule. + + Attributes: + window_before (int): + Number of characters before the finding to consider. For + tabular data, if you want to modify the likelihood of an + entire column of findngs, set this to 1. For more + information, see [Hotword example: Set the match likelihood + of a table column] + (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). + window_after (int): + Number of characters after the finding to + consider. + """ + + window_before: int = proto.Field( + proto.INT32, + number=1, + ) + window_after: int = proto.Field( + proto.INT32, + number=2, + ) + + class LikelihoodAdjustment(proto.Message): + r"""Message for specifying an adjustment to the likelihood of a + finding as part of a detection rule. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed_likelihood (google.cloud.dlp_v2.types.Likelihood): + Set the likelihood of a finding to a fixed + value. + + This field is a member of `oneof`_ ``adjustment``. + relative_likelihood (int): + Increase or decrease the likelihood by the specified number + of levels. For example, if a finding would be ``POSSIBLE`` + without the detection rule and ``relative_likelihood`` is 1, + then it is upgraded to ``LIKELY``, while a value of -1 would + downgrade it to ``UNLIKELY``. 
Likelihood may never drop + below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so + applying an adjustment of 1 followed by an adjustment of -1 + when base likelihood is ``VERY_LIKELY`` will result in a + final likelihood of ``LIKELY``. + + This field is a member of `oneof`_ ``adjustment``. + """ + + fixed_likelihood: 'Likelihood' = proto.Field( + proto.ENUM, + number=1, + oneof='adjustment', + enum='Likelihood', + ) + relative_likelihood: int = proto.Field( + proto.INT32, + number=2, + oneof='adjustment', + ) + + class HotwordRule(proto.Message): + r"""The rule that adjusts the likelihood of findings within a + certain proximity of hotwords. + + Attributes: + hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): + Range of characters within which the entire hotword must + reside. The total length of the window cannot exceed 1000 + characters. The finding itself will be included in the + window, so that hotwords can be used to match substrings of + the finding itself. Suppose you want Cloud DLP to promote + the likelihood of the phone number regex "(\d{3}) + \\d{3}-\d{4}" if the area code is known to be the area code + of a company's office. In this case, use the hotword regex + "(xxx)", where "xxx" is the area code in question. + + For tabular data, if you want to modify the likelihood of an + entire column of findngs, see [Hotword example: Set the + match likelihood of a table column] + (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). + likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment): + Likelihood adjustment to apply to all + matching findings. 
+ """ + + hotword_regex: 'CustomInfoType.Regex' = proto.Field( + proto.MESSAGE, + number=1, + message='CustomInfoType.Regex', + ) + proximity: 'CustomInfoType.DetectionRule.Proximity' = proto.Field( + proto.MESSAGE, + number=2, + message='CustomInfoType.DetectionRule.Proximity', + ) + likelihood_adjustment: 'CustomInfoType.DetectionRule.LikelihoodAdjustment' = proto.Field( + proto.MESSAGE, + number=3, + message='CustomInfoType.DetectionRule.LikelihoodAdjustment', + ) + + hotword_rule: 'CustomInfoType.DetectionRule.HotwordRule' = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='CustomInfoType.DetectionRule.HotwordRule', + ) + + info_type: 'InfoType' = proto.Field( + proto.MESSAGE, + number=1, + message='InfoType', + ) + likelihood: 'Likelihood' = proto.Field( + proto.ENUM, + number=6, + enum='Likelihood', + ) + dictionary: Dictionary = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=Dictionary, + ) + regex: Regex = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message=Regex, + ) + surrogate_type: SurrogateType = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=SurrogateType, + ) + stored_type: 'StoredType' = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message='StoredType', + ) + detection_rules: MutableSequence[DetectionRule] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=DetectionRule, + ) + exclusion_type: ExclusionType = proto.Field( + proto.ENUM, + number=8, + enum=ExclusionType, + ) + + +class FieldId(proto.Message): + r"""General identifier of a data field in a storage service. + + Attributes: + name (str): + Name describing the field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PartitionId(proto.Message): + r"""Datastore partition ID. + A partition ID identifies a grouping of entities. The grouping + is always by project and namespace, however the namespace ID may + be empty. 
+ A partition ID contains several dimensions: + project ID and namespace ID. + + Attributes: + project_id (str): + The ID of the project to which the entities + belong. + namespace_id (str): + If not empty, the ID of the namespace to + which the entities belong. + """ + + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + namespace_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class KindExpression(proto.Message): + r"""A representation of a Datastore kind. + + Attributes: + name (str): + The name of the kind. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DatastoreOptions(proto.Message): + r"""Options defining a data set within Google Cloud Datastore. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + A partition ID identifies a grouping of + entities. The grouping is always by project and + namespace, however the namespace ID may be + empty. + kind (google.cloud.dlp_v2.types.KindExpression): + The kind to process. + """ + + partition_id: 'PartitionId' = proto.Field( + proto.MESSAGE, + number=1, + message='PartitionId', + ) + kind: 'KindExpression' = proto.Field( + proto.MESSAGE, + number=2, + message='KindExpression', + ) + + +class CloudStorageRegexFileSet(proto.Message): + r"""Message representing a set of files in a Cloud Storage bucket. + Regular expressions are used to allow fine-grained control over + which files in the bucket to include. + + Included files are those that match at least one item in + ``include_regex`` and do not match any items in ``exclude_regex``. + Note that a file that matches items from both lists will *not* be + included. For a match to occur, the entire file path (i.e., + everything in the url after the bucket name) must match the regular + expression. 
+ + For example, given the input + ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: + + - ``gs://mybucket/directory1/myfile`` will be included + - ``gs://mybucket/directory1/directory2/myfile`` will be included + (``.*`` matches across ``/``) + - ``gs://mybucket/directory0/directory1/myfile`` will *not* be + included (the full path doesn't match any items in + ``include_regex``) + - ``gs://mybucket/directory1/excludedfile`` will *not* be included + (the path matches an item in ``exclude_regex``) + + If ``include_regex`` is left empty, it will match all files by + default (this is equivalent to setting ``include_regex: [".*"]``). + + Some other common use cases: + + - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will + include all files in ``mybucket`` except for .pdf files + - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` + will include all files directly under + ``gs://mybucket/directory/``, without matching across ``/`` + + Attributes: + bucket_name (str): + The name of a Cloud Storage bucket. Required. + include_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + include. All files in the bucket that match at least one of + these regular expressions will be included in the set of + files, except for those that also match an item in + ``exclude_regex``. Leaving this field empty will match all + files by default (this is equivalent to including ``.*`` in + the list). + + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. + exclude_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + exclude. All files in the bucket that match at least one of + these regular expressions will be excluded from the scan. + + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. 
+ """ + + bucket_name: str = proto.Field( + proto.STRING, + number=1, + ) + include_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclude_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CloudStorageOptions(proto.Message): + r"""Options defining a file or a set of files within a Cloud + Storage bucket. + + Attributes: + file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): + The set of one or more files to scan. + bytes_limit_per_file (int): + Max number of bytes to scan from a file. If a scanned file's + size is bigger than this value then the rest of the bytes + are omitted. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. Cannot be set + if de-identification is requested. + bytes_limit_per_file_percent (int): + Max percentage of bytes to scan from a file. The rest are + omitted. The number of bytes scanned is rounded down. Must + be between 0 and 100, inclusively. Both 0 and 100 means no + limit. Defaults to 0. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. Cannot be set + if de-identification is requested. + file_types (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of file type groups to include in the scan. If empty, + all files are scanned and available data format processors + are applied. In addition, the binary content of the selected + files is always scanned as well. Images are scanned only as + binary if the specified region does not support image + inspection and no file_types were specified. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): + + files_limit_percent (int): + Limits the number of files to scan to this + percentage of the input FileSet. Number of files + scanned is rounded down. Must be between 0 and + 100, inclusively. Both 0 and 100 means no limit. 
+ Defaults to 0. + """ + class SampleMethod(proto.Enum): + r"""How to sample bytes if not all bytes are scanned. Meaningful only + when used in conjunction with bytes_limit_per_file. If not + specified, scanning would start from the top. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No description available. + TOP (1): + Scan from the top (default). + RANDOM_START (2): + For each file larger than bytes_limit_per_file, randomly + pick the offset to start scanning. The scanned bytes are + contiguous. + """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + class FileSet(proto.Message): + r"""Set of files to scan. + + Attributes: + url (str): + The Cloud Storage url of the file(s) to scan, in the format + ``gs:///``. Trailing wildcard in the path is + allowed. + + If the url ends in a trailing slash, the bucket or directory + represented by the url will be scanned non-recursively + (content in sub-directories will not be scanned). This means + that ``gs://mybucket/`` is equivalent to + ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is + equivalent to ``gs://mybucket/directory/*``. + + Exactly one of ``url`` or ``regex_file_set`` must be set. + regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): + The regex-filtered set of files to scan. Exactly one of + ``url`` or ``regex_file_set`` must be set. 
+ """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + regex_file_set: 'CloudStorageRegexFileSet' = proto.Field( + proto.MESSAGE, + number=2, + message='CloudStorageRegexFileSet', + ) + + file_set: FileSet = proto.Field( + proto.MESSAGE, + number=1, + message=FileSet, + ) + bytes_limit_per_file: int = proto.Field( + proto.INT64, + number=4, + ) + bytes_limit_per_file_percent: int = proto.Field( + proto.INT32, + number=8, + ) + file_types: MutableSequence['FileType'] = proto.RepeatedField( + proto.ENUM, + number=5, + enum='FileType', + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=6, + enum=SampleMethod, + ) + files_limit_percent: int = proto.Field( + proto.INT32, + number=7, + ) + + +class CloudStorageFileSet(proto.Message): + r"""Message representing a set of files in Cloud Storage. + + Attributes: + url (str): + The url, in the format ``gs:///``. Trailing + wildcard in the path is allowed. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CloudStoragePath(proto.Message): + r"""Message representing a single file or path in Cloud Storage. + + Attributes: + path (str): + A url representing a file or path (no wildcards) in Cloud + Storage. Example: gs://[BUCKET_NAME]/dictionary.txt + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BigQueryOptions(proto.Message): + r"""Options defining BigQuery table and row identifiers. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Table fields that may uniquely identify a row within the + table. When ``actions.saveFindings.outputConfig.table`` is + specified, the values of columns specified here are + available in the output table under + ``location.content_locations.record_location.record_key.id_values``. + Nested fields such as ``person.birthdate.year`` are allowed. 
+ rows_limit (int): + Max number of rows to scan. If the table has more rows than + this value, the rest of the rows are omitted. If not set, or + if set to 0, all rows will be scanned. Only one of + rows_limit and rows_limit_percent can be specified. Cannot + be used in conjunction with TimespanConfig. + rows_limit_percent (int): + Max percentage of rows to scan. The rest are omitted. The + number of rows scanned is rounded down. Must be between 0 + and 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. Only one of rows_limit and rows_limit_percent + can be specified. Cannot be used in conjunction with + TimespanConfig. + sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): + + excluded_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + References to fields excluded from scanning. + This allows you to skip inspection of entire + columns which you know have no findings. + included_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Limit scanning only to these fields. + """ + class SampleMethod(proto.Enum): + r"""How to sample rows if not all rows are scanned. Meaningful only when + used in conjunction with either rows_limit or rows_limit_percent. If + not specified, rows are scanned in the order BigQuery reads them. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No description available. + TOP (1): + Scan groups of rows in the order BigQuery + provides (default). Multiple groups of rows may + be scanned in parallel, so results may not + appear in the same order the rows are read. + RANDOM_START (2): + Randomly pick groups of rows to scan. 
+ """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + table_reference: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='FieldId', + ) + rows_limit: int = proto.Field( + proto.INT64, + number=3, + ) + rows_limit_percent: int = proto.Field( + proto.INT32, + number=6, + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=4, + enum=SampleMethod, + ) + excluded_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='FieldId', + ) + included_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='FieldId', + ) + + +class StorageConfig(proto.Message): + r"""Shared message indicating Cloud storage type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): + Google Cloud Datastore options. + + This field is a member of `oneof`_ ``type``. + cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): + Cloud Storage options. + + This field is a member of `oneof`_ ``type``. + big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): + BigQuery options. + + This field is a member of `oneof`_ ``type``. + hybrid_options (google.cloud.dlp_v2.types.HybridOptions): + Hybrid inspection options. + + This field is a member of `oneof`_ ``type``. + timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): + + """ + + class TimespanConfig(proto.Message): + r"""Configuration of the timespan of the items to include in + scanning. 
Currently only supported when inspecting Cloud Storage + and BigQuery. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows older than + this value. If not set, no lower time limit is + applied. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows newer than + this value. If not set, no upper time limit is + applied. + timestamp_field (google.cloud.dlp_v2.types.FieldId): + Specification of the field containing the timestamp of + scanned items. Used for data sources like Datastore and + BigQuery. + + For BigQuery + + If this value is not specified and the table was modified + between the given start and end times, the entire table will + be scanned. If this value is specified, then rows are + filtered based on the given start and end times. Rows with a + ``NULL`` value in the provided BigQuery column are skipped. + Valid data types of the provided BigQuery column are: + ``INTEGER``, ``DATE``, ``TIMESTAMP``, and ``DATETIME``. + + If your BigQuery table is `partitioned at ingestion + time `__, + you can use any of the following pseudo-columns as your + timestamp field. When used with Cloud DLP, these + pseudo-column names are case sensitive. + + .. raw:: html + +
+                   <ul>
+                   <li><code>_PARTITIONTIME</code></li>
+                   <li><code>_PARTITIONDATE</code></li>
+                   <li><code>_PARTITION_LOAD_TIME</code></li>
+                   </ul>
+ + For Datastore + + If this value is specified, then entities are filtered based + on the given start and end times. If an entity does not + contain the provided timestamp property or contains empty or + invalid values, then it is included. Valid data types of the + provided timestamp property are: ``TIMESTAMP``. + + See the `known + issue `__ + related to this operation. + enable_auto_population_of_timespan_config (bool): + When the job is started by a JobTrigger we will + automatically figure out a valid start_time to avoid + scanning files that have not been modified since the last + time the JobTrigger executed. This will be based on the time + of the execution of the last run of the JobTrigger or the + timespan end_time used in the last run of the JobTrigger. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + timestamp_field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=3, + message='FieldId', + ) + enable_auto_population_of_timespan_config: bool = proto.Field( + proto.BOOL, + number=4, + ) + + datastore_options: 'DatastoreOptions' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='DatastoreOptions', + ) + cloud_storage_options: 'CloudStorageOptions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='CloudStorageOptions', + ) + big_query_options: 'BigQueryOptions' = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message='BigQueryOptions', + ) + hybrid_options: 'HybridOptions' = proto.Field( + proto.MESSAGE, + number=9, + oneof='type', + message='HybridOptions', + ) + timespan_config: TimespanConfig = proto.Field( + proto.MESSAGE, + number=6, + message=TimespanConfig, + ) + + +class HybridOptions(proto.Message): + r"""Configuration to control jobs where the content being + inspected is outside of Google 
Cloud Platform. + + Attributes: + description (str): + A short description of where the data is + coming from. Will be stored once in the job. 256 + max length. + required_finding_label_keys (MutableSequence[str]): + These are labels that each inspection request must include + within their 'finding_labels' map. Request may contain + others, but any missing one of these will be rejected. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + No more than 10 keys can be required. + labels (MutableMapping[str, str]): + To organize findings, these labels will be added to each + finding. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional + information to make findings meaningful such as + the columns that are primary keys. + """ + + description: str = proto.Field( + proto.STRING, + number=1, + ) + required_finding_label_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + table_options: 'TableOptions' = proto.Field( + proto.MESSAGE, + number=4, + message='TableOptions', + ) + + +class BigQueryKey(proto.Message): + r"""Row key for identifying a record in BigQuery table. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + row_number (int): + Row number inferred at the time the table was scanned. 
This + value is nondeterministic, cannot be queried, and may be + null for inspection jobs. To locate findings within a table, + specify + ``inspect_job.storage_config.big_query_options.identifying_fields`` + in ``CreateDlpJobRequest``. + """ + + table_reference: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + row_number: int = proto.Field( + proto.INT64, + number=2, + ) + + +class DatastoreKey(proto.Message): + r"""Record key for a finding in Cloud Datastore. + + Attributes: + entity_key (google.cloud.dlp_v2.types.Key): + Datastore entity key. + """ + + entity_key: 'Key' = proto.Field( + proto.MESSAGE, + number=1, + message='Key', + ) + + +class Key(proto.Message): + r"""A unique identifier for a Datastore entity. + If a key's partition ID or any of its path kinds or names are + reserved/read-only, the key is reserved/read-only. + A reserved/read-only key is forbidden in certain documented + contexts. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + Entities are partitioned into subsets, + currently identified by a project ID and + namespace ID. Queries are scoped to a single + partition. + path (MutableSequence[google.cloud.dlp_v2.types.Key.PathElement]): + The entity path. An entity path consists of one or more + elements composed of a kind and a string or numerical + identifier, which identify entities. The first element + identifies a *root entity*, the second element identifies a + *child* of the root entity, the third element identifies a + child of the second entity, and so forth. The entities + identified by all prefixes of the path are called the + element's *ancestors*. + + A path can never be empty, and a path can have at most 100 + elements. + """ + + class PathElement(proto.Message): + r"""A (kind, ID/name) pair used to construct a key path. + If either name or ID is set, the element is complete. If neither + is set, the element is incomplete. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kind (str): + The kind of the entity. A kind matching regex ``__.*__`` is + reserved/read-only. A kind must not contain more than 1500 + bytes when UTF-8 encoded. Cannot be ``""``. + id (int): + The auto-allocated ID of the entity. + Never equal to zero. Values less than zero are + discouraged and may not be supported in the + future. + + This field is a member of `oneof`_ ``id_type``. + name (str): + The name of the entity. A name matching regex ``__.*__`` is + reserved/read-only. A name must not be more than 1500 bytes + when UTF-8 encoded. Cannot be ``""``. + + This field is a member of `oneof`_ ``id_type``. + """ + + kind: str = proto.Field( + proto.STRING, + number=1, + ) + id: int = proto.Field( + proto.INT64, + number=2, + oneof='id_type', + ) + name: str = proto.Field( + proto.STRING, + number=3, + oneof='id_type', + ) + + partition_id: 'PartitionId' = proto.Field( + proto.MESSAGE, + number=1, + message='PartitionId', + ) + path: MutableSequence[PathElement] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=PathElement, + ) + + +class RecordKey(proto.Message): + r"""Message for a unique key indicating a record that contains a + finding. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_key (google.cloud.dlp_v2.types.DatastoreKey): + + This field is a member of `oneof`_ ``type``. 
+ big_query_key (google.cloud.dlp_v2.types.BigQueryKey): + + This field is a member of `oneof`_ ``type``. + id_values (MutableSequence[str]): + Values of identifying columns in the given row. Order of + values matches the order of ``identifying_fields`` specified + in the scanning request. + """ + + datastore_key: 'DatastoreKey' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='DatastoreKey', + ) + big_query_key: 'BigQueryKey' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='BigQueryKey', + ) + id_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class BigQueryTable(proto.Message): + r"""Message defining the location of a BigQuery table. A table is + uniquely identified by its project_id, dataset_id, and table_name. + Within a query a table is often referenced with a string in the + format of: ``:.`` or + ``..``. + + Attributes: + project_id (str): + The Google Cloud Platform project ID of the + project containing the table. If omitted, + project ID is inferred from the API call. + dataset_id (str): + Dataset ID of the table. + table_id (str): + Name of the table. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=2, + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BigQueryField(proto.Message): + r"""Message defining a field of a BigQuery table. + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Source table of the field. + field (google.cloud.dlp_v2.types.FieldId): + Designated field in the BigQuery table. + """ + + table: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=2, + message='FieldId', + ) + + +class EntityId(proto.Message): + r"""An entity in a dataset is a field or set of fields that correspond + to a single person. 
For example, in medical records the ``EntityId`` + might be a patient identifier, or for financial records it might be + an account identifier. This message is used when generalizations or + analysis must take into account that multiple rows correspond to the + same entity. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Composite key indicating which field contains + the entity identifier. + """ + + field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=1, + message='FieldId', + ) + + +class TableOptions(proto.Message): + r"""Instructions regarding the table content being inspected. + + Attributes: + identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + The columns that are the primary keys for + table objects included in ContentItem. A copy of + this cell's value will stored alongside + alongside each finding so that the finding can + be traced to the specific row it came from. No + more than 3 may be provided. + """ + + identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FieldId', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini new file mode 100644 index 00000000..574c5aed --- /dev/null +++ b/owl-bot-staging/v2/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py new file mode 100644 index 00000000..6b1462df --- /dev/null +++ b/owl-bot-staging/v2/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/dlp_v2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py new file mode 100644 index 00000000..e4371abf --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ActivateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ActivateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.activate_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ActivateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py new file mode 100644 index 00000000..c0b4fac1 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ActivateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ActivateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.activate_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ActivateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py new file mode 100644 index 00000000..d8190299 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CancelDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_CancelDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py new file mode 100644 index 00000000..7475d6fa --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CancelDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_CancelDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py new file mode 100644 index 00000000..81ad2519 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py new file mode 100644 index 00000000..b394f634 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py new file mode 100644 index 00000000..28770717 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py new file mode 100644 index 00000000..779754f6 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py new file mode 100644 index 00000000..aeb40676 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py new file mode 100644 index 00000000..0e344b36 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py new file mode 100644 index 00000000..3e82b8f3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = await client.create_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py new file mode 100644 index 00000000..ebb74284 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = client.create_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py new file mode 100644 index 00000000..cae6db89 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py new file mode 100644 index 00000000..d59a301d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py new file mode 100644 index 00000000..4903b032 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeidentifyContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = await client.deidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_DeidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py new file mode 100644 index 00000000..2422616c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeidentifyContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = client.deidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_DeidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py new file mode 100644 index 00000000..f544f12d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_deidentify_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py new file mode 100644 index 00000000..a33f3b26 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_deidentify_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py new file mode 100644 index 00000000..8737125b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py new file mode 100644 index 00000000..bb0ce9df --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + client.delete_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py new file mode 100644 index 00000000..f0aec8eb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_inspect_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py new file mode 100644 index 00000000..c908d867 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_inspect_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py new file mode 100644 index 00000000..3784ee3e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_trigger(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py new file mode 100644 index 00000000..9f4405da --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + client.delete_job_trigger(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py new file mode 100644 index 00000000..652d88ff --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_stored_info_type(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py new file mode 100644 index 00000000..7e37ce36 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_stored_info_type(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py new file mode 100644 index 00000000..869504da --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinishDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_FinishDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.finish_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_FinishDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py new file mode 100644 index 00000000..1b694f90 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinishDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_FinishDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + client.finish_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_FinishDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py new file mode 100644 index 00000000..fc1570d3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py new file mode 100644 index 00000000..bb1e1986 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py new file mode 100644 index 00000000..2065aa85 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py new file mode 100644 index 00000000..13959bde --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for GetDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py new file mode 100644 index 00000000..1a9c9649 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py new file mode 100644 index 00000000..112e3d83 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py new file mode 100644 index 00000000..248184c7 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py new file mode 100644 index 00000000..9c6cdb3a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py new file mode 100644 index 00000000..a7820fe2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py new file mode 100644 index 00000000..d0b0a44c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py new file mode 100644 index 00000000..e9f9be5a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py new file mode 100644 index 00000000..2bfd7fe1 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py new file mode 100644 index 00000000..dbdd91c2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py new file mode 100644 index 00000000..a9c4c85e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py new file mode 100644 index 00000000..3f24588b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InspectContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_InspectContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = await client.inspect_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_InspectContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py new file mode 100644 index 00000000..4b5a10f3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for InspectContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_InspectContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = client.inspect_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_InspectContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py new file mode 100644 index 00000000..d1a40dc0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeidentifyTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py new file mode 100644 index 00000000..6a01f0fb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeidentifyTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py new file mode 100644 index 00000000..57c790d8 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDlpJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDlpJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDlpJobs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py new file mode 100644 index 00000000..7d06c237 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDlpJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDlpJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDlpJobs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py new file mode 100644 index 00000000..16b871f8 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInfoTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = await client.list_info_types(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ListInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py new file mode 100644 index 00000000..9e3ca167 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for ListInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInfoTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = client.list_info_types(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ListInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py new file mode 100644 index 00000000..6e405a4f --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInspectTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInspectTemplates_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListInspectTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py new file mode 100644 index 00000000..71673677 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInspectTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInspectTemplates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListInspectTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py new file mode 100644 index 00000000..e8c0281f --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListJobTriggers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListJobTriggers_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py new file mode 100644 index 00000000..0f9141c0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListJobTriggers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListJobTriggers_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py new file mode 100644 index 00000000..460c99c4 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListStoredInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py new file mode 100644 index 00000000..1ad1796e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListStoredInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py new file mode 100644 index 00000000..a7a0d502 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RedactImage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_RedactImage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = await client.redact_image(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_RedactImage_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py new file mode 100644 index 00000000..272bdb80 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RedactImage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_RedactImage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = client.redact_image(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_RedactImage_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py new file mode 100644 index 00000000..401f62df --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ReidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ReidentifyContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.reidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ReidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py new file mode 100644 index 00000000..9e654be9 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ReidentifyContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = client.reidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ReidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py new file mode 100644 index 00000000..8b32186c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py new file mode 100644 index 00000000..e3296531 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py new file mode 100644 index 00000000..8e062116 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py new file mode 100644 index 00000000..332c5de6 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py new file mode 100644 index 00000000..58baaeeb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.update_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py new file mode 100644 index 00000000..3694b5ff --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.update_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py new file mode 100644 index 00000000..d5658d32 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.update_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py new file mode 100644 index 00000000..9471180b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.update_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json new file mode 100644 index 00000000..956f9eab --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -0,0 +1,5503 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.privacy.dlp.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-dlp", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.activate_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ActivateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "activate_job_trigger" + }, + "description": "Sample for ActivateJobTrigger", + "file": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.activate_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ActivateJobTrigger" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "activate_job_trigger" + }, + "description": "Sample for ActivateJobTrigger", + "file": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.cancel_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CancelDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_dlp_job" + }, + "description": "Sample for CancelDlpJob", + "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.cancel_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CancelDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_dlp_job" + }, + "description": "Sample for CancelDlpJob", + "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py" + 
}, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "create_deidentify_template" + }, + "description": "Sample for CreateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.create_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "create_deidentify_template" + }, + "description": "Sample for CreateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_job", + "type": "google.cloud.dlp_v2.types.InspectJobConfig" + }, + { + "name": "risk_job", + "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "create_dlp_job" + }, + "description": "Sample for CreateDlpJob", + "file": "dlp_v2_generated_dlp_service_create_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" + }, + { + "name": "parent", + "type": "str" + }, 
+ { + "name": "inspect_job", + "type": "google.cloud.dlp_v2.types.InspectJobConfig" + }, + { + "name": "risk_job", + "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "create_dlp_job" + }, + "description": "Sample for CreateDlpJob", + "file": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "create_inspect_template" + }, + "description": "Sample for CreateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_create_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "create_inspect_template" + }, + "description": "Sample for 
CreateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "create_job_trigger" + }, + "description": "Sample for CreateJobTrigger", + "file": "dlp_v2_generated_dlp_service_create_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_async", + "segments": [ + { + "end": 55, + "start": 27, + 
"type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "create_job_trigger" + }, + "description": "Sample for CreateJobTrigger", + "file": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 
53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "create_stored_info_type" + }, + "description": "Sample for CreateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", 
+ "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "create_stored_info_type" + }, + "description": "Sample for CreateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.deidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", + "shortName": "deidentify_content" + }, + "description": "Sample for DeidentifyContent", + "file": "dlp_v2_generated_dlp_service_deidentify_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_deidentify_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.deidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", + "shortName": "deidentify_content" + }, + "description": "Sample for DeidentifyContent", + "file": "dlp_v2_generated_dlp_service_deidentify_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_deidentify_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_deidentify_template" + }, + "description": "Sample for DeleteDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async", + 
"segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_deidentify_template" + }, + "description": "Sample for DeleteDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dlp_job" + }, + "description": "Sample for DeleteDlpJob", + "file": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": 
"DeleteDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dlp_job" + }, + "description": "Sample for DeleteDlpJob", + "file": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_inspect_template" + }, + "description": "Sample for 
DeleteInspectTemplate", + "file": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_inspect_template" + }, + "description": "Sample for DeleteInspectTemplate", + "file": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_trigger" + }, + "description": "Sample for DeleteJobTrigger", + "file": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.delete_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_trigger" + }, + "description": "Sample for DeleteJobTrigger", + "file": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_stored_info_type" + }, + "description": "Sample for DeleteStoredInfoType", + "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_stored_info_type" + }, + "description": "Sample for DeleteStoredInfoType", + "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dlp_v2_generated_DlpService_DeleteStoredInfoType_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.finish_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "FinishDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "finish_dlp_job" + }, + "description": "Sample for FinishDlpJob", + "file": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.finish_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "FinishDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "finish_dlp_job" + }, + "description": "Sample for FinishDlpJob", + "file": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "get_deidentify_template" + }, + "description": "Sample for GetDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "get_deidentify_template" + }, + "description": "Sample for GetDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "get_dlp_job" + }, + "description": "Sample for GetDlpJob", + "file": "dlp_v2_generated_dlp_service_get_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + 
{ + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "get_dlp_job" + }, + "description": "Sample for GetDlpJob", + "file": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "get_inspect_template" + }, + "description": "Sample for GetInspectTemplate", + "file": "dlp_v2_generated_dlp_service_get_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + 
}, + "shortName": "GetInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "get_inspect_template" + }, + "description": "Sample for GetInspectTemplate", + "file": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "get_job_trigger" + }, + "description": "Sample for GetJobTrigger", + "file": "dlp_v2_generated_dlp_service_get_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "get_job_trigger" + }, + "description": "Sample for GetJobTrigger", + "file": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "get_stored_info_type" + }, + "description": "Sample for GetStoredInfoType", + "file": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "get_stored_info_type" + }, + "description": "Sample for GetStoredInfoType", + "file": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_dlp_job", + "method": { + "fullName": 
"google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_dlp_job" + }, + "description": "Sample for HybridInspectDlpJob", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_dlp_job" + }, + "description": "Sample for HybridInspectDlpJob", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_job_trigger" + }, + 
"description": "Sample for HybridInspectJobTrigger", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_job_trigger" + }, + "description": "Sample for HybridInspectJobTrigger", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.inspect_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "InspectContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.InspectContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", + "shortName": "inspect_content" + }, + "description": "Sample for InspectContent", + "file": "dlp_v2_generated_dlp_service_inspect_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_InspectContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dlp_v2_generated_dlp_service_inspect_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.inspect_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "InspectContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.InspectContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", + "shortName": "inspect_content" + }, + "description": "Sample for InspectContent", + "file": "dlp_v2_generated_dlp_service_inspect_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_InspectContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_inspect_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_deidentify_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDeidentifyTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager", + "shortName": "list_deidentify_templates" + }, + "description": "Sample for ListDeidentifyTemplates", + "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_deidentify_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDeidentifyTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager", + "shortName": "list_deidentify_templates" + }, + "description": "Sample for ListDeidentifyTemplates", + "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_dlp_jobs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDlpJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager", + "shortName": "list_dlp_jobs" + }, + 
"description": "Sample for ListDlpJobs", + "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_dlp_jobs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDlpJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager", + "shortName": "list_dlp_jobs" + }, + "description": "Sample for ListDlpJobs", + "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", + "shortName": "list_info_types" + }, + "description": "Sample for ListInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_info_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_info_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", + "shortName": "list_info_types" + }, + "description": "Sample for ListInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_info_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_info_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_inspect_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": 
"ListInspectTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager", + "shortName": "list_inspect_templates" + }, + "description": "Sample for ListInspectTemplates", + "file": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_inspect_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInspectTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager", + "shortName": "list_inspect_templates" + }, + "description": "Sample for ListInspectTemplates", + "file": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_job_triggers", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListJobTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager", + "shortName": "list_job_triggers" + }, + "description": "Sample for ListJobTriggers", + "file": "dlp_v2_generated_dlp_service_list_job_triggers_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_job_triggers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_job_triggers", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListJobTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager", + "shortName": "list_job_triggers" + }, + "description": "Sample for ListJobTriggers", + "file": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_stored_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListStoredInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager", + "shortName": "list_stored_info_types" + }, + "description": "Sample for ListStoredInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_stored_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListStoredInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager", + "shortName": "list_stored_info_types" + }, + "description": "Sample for ListStoredInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.redact_image", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "RedactImage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.RedactImageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", + "shortName": "redact_image" + }, + "description": "Sample for RedactImage", + "file": "dlp_v2_generated_dlp_service_redact_image_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_RedactImage_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_redact_image_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.redact_image", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "RedactImage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.RedactImageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", + "shortName": 
"redact_image" + }, + "description": "Sample for RedactImage", + "file": "dlp_v2_generated_dlp_service_redact_image_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_RedactImage_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_redact_image_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.reidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ReidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", + "shortName": "reidentify_content" + }, + "description": "Sample for ReidentifyContent", + "file": "dlp_v2_generated_dlp_service_reidentify_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_reidentify_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.reidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ReidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", + "shortName": "reidentify_content" + }, + "description": "Sample for ReidentifyContent", + "file": "dlp_v2_generated_dlp_service_reidentify_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_reidentify_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "update_deidentify_template" + }, + "description": "Sample for UpdateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.update_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "update_deidentify_template" + }, + "description": "Sample for UpdateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_inspect_template", + "method": { + "fullName": 
"google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "update_inspect_template" + }, + "description": "Sample for UpdateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_update_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + 
"shortName": "UpdateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "update_inspect_template" + }, + "description": "Sample for UpdateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" + }, 
+ { + "name": "name", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "update_job_trigger" + }, + "description": "Sample for UpdateJobTrigger", + "file": "dlp_v2_generated_dlp_service_update_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "update_job_trigger" + }, + "description": "Sample for UpdateJobTrigger", + "file": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "update_stored_info_type" + }, + "description": "Sample for UpdateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "update_stored_info_type" + }, + "description": "Sample for UpdateStoredInfoType", + "file": 
"dlp_v2_generated_dlp_service_update_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py" + } + ] +} diff --git a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py new file mode 100644 index 00000000..9adcd0d5 --- /dev/null +++ b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py @@ -0,0 +1,209 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Returns ``(true_items, false_items)``: the elements of *iterator* for
    which *predicate* is truthy, then those for which it is falsy, each
    preserving the original input order.
    """
    results = ([], [])

    for i in iterator:
        # bool -> int index: falsy items land in results[0], truthy in results[1].
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class dlpCallTransformer(cst.CSTTransformer):
    """libcst transformer that un-flattens DLP client method calls.

    Rewrites e.g. ``client.get_dlp_job(name)`` into
    ``client.get_dlp_job(request={'name': name})`` while keeping the control
    parameters (retry/timeout/metadata) as plain keyword arguments.
    """
    # Fix: these are variadic tuples of strings, so the annotation must be
    # Tuple[str, ...]; the previous Tuple[str] denotes a one-element tuple.
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    # Maps each API method name to its flattened request-field parameter
    # names, in declaration order (positional args map onto these).
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'activate_job_trigger': ('name', ),
        'cancel_dlp_job': ('name', ),
        'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ),
        'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ),
        'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ),
        'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ),
        'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ),
        'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ),
        'delete_deidentify_template': ('name', ),
        'delete_dlp_job': ('name', ),
        'delete_inspect_template': ('name', ),
        'delete_job_trigger': ('name', ),
        'delete_stored_info_type': ('name', ),
        'finish_dlp_job': ('name', ),
        'get_deidentify_template': ('name', ),
        'get_dlp_job': ('name', ),
        'get_inspect_template': ('name', ),
        'get_job_trigger': ('name', ),
        'get_stored_info_type': ('name', ),
        'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ),
        'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ),
        'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ),
        'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ),
        'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ),
        'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ),
        'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ),
        'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'type_', 'location_id', ),
        'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ),
        'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ),
        'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ),
        'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ),
        'update_inspect_template': ('name', 'inspect_template', 'update_mask', ),
        'update_job_trigger': ('name', 'job_trigger', 'update_mask', ),
        'update_stored_info_type': ('name', 'config', 'update_mask', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite one call site, or return it unchanged when it is not a
        recognized API method or has already been converted."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the request fields must be control params,
        # passed positionally; re-attach them as keyword arguments.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    # NOTE(review): cst.Element as a DictElement value matches
                    # the upstream generator output — confirm against the
                    # libcst API if this is ever touched.
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=dlpCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the dlp client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
import io
import os

import setuptools # type: ignore

# Absolute path of the directory containing this setup.py; used to locate
# the version module and the README.  (Fix: this was previously computed
# twice with identical results — compute it once.)
package_root = os.path.abspath(os.path.dirname(__file__))

name = 'google-cloud-dlp'


description = "Google Cloud Dlp API client library"

# Read __version__ from the gapic_version module without importing the
# (not-yet-installed) package.
version = {}
with open(os.path.join(package_root, 'google/cloud/dlp/gapic_version.py')) as fp:
    exec(fp.read(), version)
version = version["__version__"]

# Pre-1.0 versions are published as Beta; everything else as Stable.
if version[0] == "0":
    release_status = "Development Status :: 4 - Beta"
else:
    release_status = "Development Status :: 5 - Production/Stable"

dependencies = [
    "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
    "proto-plus >= 1.22.0, <2.0.0dev",
    "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'",
    "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
]
url = "https://github.com/googleapis/python-dlp"

readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
    readme = readme_file.read()

# Only ship the google.* namespace packages.
packages = [
    package
    for package in setuptools.PEP420PackageFinder.find()
    if package.startswith("google")
]

namespaces = ["google", "google.cloud"]

setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="googleapis-packages@google.com",
    license="Apache 2.0",
    url=url,
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    python_requires=">=3.7",
    namespace_packages=namespaces,
    install_requires=dependencies,
    include_package_data=True,
    zip_safe=False,
)
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt new file mode 100644 index 00000000..6c44adfe --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py new file mode 100644 index 00000000..94f19f6e --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -0,0 +1,17403 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient +from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.services.dlp_service import transports +from google.cloud.dlp_v2.types import dlp +from google.cloud.dlp_v2.types import storage +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf 
import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DlpServiceClient._get_default_mtls_endpoint(None) is None + assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DlpServiceClient, "grpc"), + (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), +]) +def test_dlp_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 
@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.DlpServiceGrpcTransport, "grpc"),
    (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    (transports.DlpServiceRestTransport, "rest"),
])
def test_dlp_service_client_service_account_always_use_jwt(transport_class, transport_name):
    """always_use_jwt_access=True must be forwarded to the service-account
    credentials; always_use_jwt_access=False must leave them untouched."""
    for enabled in (True, False):
        with mock.patch.object(
            service_account.Credentials, 'with_always_use_jwt_access', create=True
        ) as jwt_patch:
            sa_creds = service_account.Credentials(None, None, None)
            transport_class(credentials=sa_creds, always_use_jwt_access=enabled)
            if enabled:
                jwt_patch.assert_called_once_with(True)
            else:
                jwt_patch.assert_not_called()
def test_dlp_service_client_get_transport_class():
    """get_transport_class returns a supported default and honors an explicit name."""
    default_transport = DlpServiceClient.get_transport_class()
    assert default_transport in [
        transports.DlpServiceGrpcTransport,
        transports.DlpServiceRestTransport,
    ]

    named_transport = DlpServiceClient.get_transport_class("grpc")
    assert named_transport == transports.DlpServiceGrpcTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"),
    (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    (DlpServiceClient, transports.DlpServiceRestTransport, "rest"),
])
@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient))
@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient))
def test_dlp_service_client_client_options(client_class, transport_class, transport_name):
    """Exercise endpoint, quota-project and audience resolution in the constructor."""

    def check_init(init_mock, *, host, quota_project_id=None, api_audience=None):
        # Every constructor path must forward exactly this argument set to
        # the transport's __init__.
        init_mock.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=host,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=quota_project_id,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=api_audience,
        )

    # A ready-made transport instance is used verbatim: no new transport is built.
    with mock.patch.object(DlpServiceClient, 'get_transport_class') as transport_factory:
        ready_transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client_class(transport=ready_transport)
        transport_factory.assert_not_called()

    # A transport *name* forces construction of a new transport.
    with mock.patch.object(DlpServiceClient, 'get_transport_class') as transport_factory:
        client_class(transport=transport_name)
        transport_factory.assert_called()

    # An explicit api_endpoint wins.
    opts = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as init_mock:
        init_mock.return_value = None
        client_class(transport=transport_name, client_options=opts)
        check_init(init_mock, host="squid.clam.whelk")

    # No api_endpoint and GOOGLE_API_USE_MTLS_ENDPOINT="never": plain endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as init_mock:
            init_mock.return_value = None
            client = client_class(transport=transport_name)
            check_init(init_mock, host=client.DEFAULT_ENDPOINT)

    # No api_endpoint and GOOGLE_API_USE_MTLS_ENDPOINT="always": mTLS endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as init_mock:
            init_mock.return_value = None
            client = client_class(transport=transport_name)
            check_init(init_mock, host=client.DEFAULT_MTLS_ENDPOINT)

    # An unsupported GOOGLE_API_USE_MTLS_ENDPOINT value is rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client_class(transport=transport_name)

    # An unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE value is rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client_class(transport=transport_name)

    # quota_project_id is forwarded.
    opts = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as init_mock:
        init_mock.return_value = None
        client = client_class(client_options=opts, transport=transport_name)
        check_init(init_mock, host=client.DEFAULT_ENDPOINT,
                   quota_project_id="octopus")

    # api_audience is forwarded.
    opts = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as init_mock:
        init_mock.return_value = None
        client = client_class(client_options=opts, transport=transport_name)
        check_init(init_mock, host=client.DEFAULT_ENDPOINT,
                   api_audience="https://language.googleapis.com")
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"),
    (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"),
    (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
    (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"),
    (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"),
])
@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient))
@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """With GOOGLE_API_USE_MTLS_ENDPOINT="auto", the endpoint autoswitches to
    mTLS exactly when a client certificate is available AND
    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true"."""
    cert_env = {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}

    def check_init(init_mock, *, host, cert_source):
        init_mock.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=host,
            scopes=None,
            client_cert_source_for_mtls=cert_source,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Case 1: client_cert_source supplied explicitly via client options.
    with mock.patch.dict(os.environ, cert_env):
        opts = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as init_mock:
            init_mock.return_value = None
            client = client_class(client_options=opts, transport=transport_name)

            if use_client_cert_env == "false":
                expected_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            check_init(init_mock, host=expected_host, cert_source=expected_cert_source)

    # Case 2: no explicit source, but ADC provides a default client cert.
    with mock.patch.dict(os.environ, cert_env):
        with mock.patch.object(transport_class, '__init__') as init_mock:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    # NOTE(review): like the generated original, the expected
                    # endpoint is read off the `client` left over from case 1
                    # (DEFAULT_ENDPOINT / DEFAULT_MTLS_ENDPOINT are class-level
                    # attributes, so the value is the same).
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_cert_source = client_cert_source_callback

                    init_mock.return_value = None
                    client = client_class(transport=transport_name)
                    check_init(init_mock, host=expected_host, cert_source=expected_cert_source)

    # Case 3: neither an explicit source nor an ADC default cert -> plain endpoint.
    with mock.patch.dict(os.environ, cert_env):
        with mock.patch.object(transport_class, '__init__') as init_mock:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                init_mock.return_value = None
                client = client_class(transport=transport_name)
                check_init(init_mock, host=client.DEFAULT_ENDPOINT, cert_source=None)
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), +]) +def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), +]) +def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_dlp_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DlpServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_dlp_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.InspectContentRequest, + dict, +]) +def test_inspect_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectContentResponse( + ) + response = client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +def test_inspect_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + client.inspect_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + +@pytest.mark.asyncio +async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse(
+        ))
+        response = await client.inspect_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.InspectContentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.InspectContentResponse)
+
+
+@pytest.mark.asyncio
+async def test_inspect_content_async_from_dict():
+    await test_inspect_content_async(request_type=dict)
+
+
+def test_inspect_content_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.InspectContentRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.inspect_content),
+            '__call__') as call:
+        call.return_value = dlp.InspectContentResponse()
+        client.inspect_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'parent=parent_value',
+        ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_inspect_content_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.InspectContentRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) + await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.RedactImageRequest, + dict, +]) +def test_redact_image(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + ) + response = client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +def test_redact_image_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.redact_image),
+            '__call__') as call:
+        client.redact_image()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.RedactImageRequest()
+
+@pytest.mark.asyncio
+async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.redact_image),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse(
+            redacted_image=b'redacted_image_blob',
+            extracted_text='extracted_text_value',
+        ))
+        response = await client.redact_image(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.RedactImageRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.RedactImageResponse)
+    assert response.redacted_image == b'redacted_image_blob'
+    assert response.extracted_text == 'extracted_text_value'
+
+
+@pytest.mark.asyncio
+async def test_redact_image_async_from_dict():
+    await test_redact_image_async(request_type=dict)
+
+
+def test_redact_image_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. 
Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = dlp.RedactImageResponse() + client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_redact_image_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) + await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.DeidentifyContentRequest, + dict, +]) +def test_deidentify_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyContentResponse( + ) + response = client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +def test_deidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.deidentify_content),
+            '__call__') as call:
+        client.deidentify_content()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.DeidentifyContentRequest()
+
+@pytest.mark.asyncio
+async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.deidentify_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse(
+        ))
+        response = await client.deidentify_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.DeidentifyContentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.DeidentifyContentResponse)
+
+
+@pytest.mark.asyncio
+async def test_deidentify_content_async_from_dict():
+    await test_deidentify_content_async(request_type=dict)
+
+
+def test_deidentify_content_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.DeidentifyContentRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = dlp.DeidentifyContentResponse() + client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_deidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) + await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.ReidentifyContentRequest, + dict, +]) +def test_reidentify_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ReidentifyContentResponse( + ) + response = client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + client.reidentify_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + +@pytest.mark.asyncio +async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse(
+        ))
+        response = await client.reidentify_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ReidentifyContentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.ReidentifyContentResponse)
+
+
+@pytest.mark.asyncio
+async def test_reidentify_content_async_from_dict():
+    await test_reidentify_content_async(request_type=dict)
+
+
+def test_reidentify_content_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.ReidentifyContentRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.reidentify_content),
+            '__call__') as call:
+        call.return_value = dlp.ReidentifyContentResponse()
+        client.reidentify_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'parent=parent_value',
+        ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_reidentify_content_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.ReidentifyContentRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) + await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInfoTypesRequest, + dict, +]) +def test_list_info_types(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse( + ) + response = client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +def test_list_info_types_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_info_types),
+            '__call__') as call:
+        client.list_info_types()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListInfoTypesRequest()
+
+@pytest.mark.asyncio
+async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse(
+        ))
+        response = await client.list_info_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListInfoTypesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.ListInfoTypesResponse)
+
+
+@pytest.mark.asyncio
+async def test_list_info_types_async_from_dict():
+    await test_list_info_types_async(request_type=dict)
+
+
+def test_list_info_types_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.ListInfoTypesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_info_types(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_info_types_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_info_types(
+            dlp.ListInfoTypesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_info_types_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call,
+        # wrapped in an async-aware fake call object.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_info_types(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_info_types_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateInspectTemplateRequest, + dict, +]) +def test_create_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + client.create_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_inspect_template_async_from_dict(): + await test_create_inspect_template_async(request_type=dict) + + +def test_create_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + + +def test_create_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateInspectTemplateRequest, + dict, +]) +def test_update_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + client.update_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_inspect_template_async_from_dict(): + await test_update_inspect_template_async(request_type=dict) + + +def test_update_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_inspect_template( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_inspect_template( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetInspectTemplateRequest, + dict, +]) +def test_get_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + client.get_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_inspect_template_async_from_dict(): + await test_get_inspect_template_async(request_type=dict) + + +def test_get_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.GetInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInspectTemplatesRequest, + dict, +]) +def test_list_inspect_templates(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_inspect_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + client.list_inspect_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + +@pytest.mark.asyncio +async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_from_dict(): + await test_list_inspect_templates_async(request_type=dict) + + +def test_list_inspect_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value = dlp.ListInspectTemplatesResponse() + client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_inspect_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) + await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_inspect_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_inspect_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_inspect_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_inspect_templates_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = dlp.ListInspectTemplatesResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_inspect_templates(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_inspect_templates(
+            dlp.ListInspectTemplatesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_inspect_templates_pager(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_inspect_templates),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[],
+                next_page_token='def',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_inspect_templates(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, dlp.InspectTemplate)
+                   for i in results)
+def test_list_inspect_templates_pages(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_inspect_templates),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[],
+                next_page_token='def',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_inspect_templates(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_async_pager():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_inspect_templates),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[],
+                next_page_token='def',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_inspect_templates(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dlp.InspectTemplate)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_async_pages():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_inspect_templates),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_inspect_templates(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteInspectTemplateRequest, + dict, +]) +def test_delete_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + client.delete_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_inspect_template_async_from_dict(): + await test_delete_inspect_template_async(request_type=dict) + + +def test_delete_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = None + client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDeidentifyTemplateRequest, + dict, +]) +def test_create_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, dlp.DeidentifyTemplate)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+
+
+def test_create_deidentify_template_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_deidentify_template),
+            '__call__') as call:
+        client.create_deidentify_template()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.CreateDeidentifyTemplateRequest()
+
+@pytest.mark.asyncio
+async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_deidentify_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+        ))
+        response = await client.create_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.CreateDeidentifyTemplateRequest()
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_deidentify_template_async_from_dict(): + await test_create_deidentify_template_async(request_type=dict) + + +def test_create_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_deidentify_template( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + + +def test_create_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_deidentify_template( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDeidentifyTemplateRequest, + dict, +]) +def test_update_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.update_deidentify_template),
+            '__call__') as call:
+        client.update_deidentify_template()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateDeidentifyTemplateRequest()
+
+@pytest.mark.asyncio
+async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_deidentify_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+        ))
+        response = await client.update_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateDeidentifyTemplateRequest()
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_deidentify_template_async_from_dict(): + await test_update_deidentify_template_async(request_type=dict) + + +def test_update_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_deidentify_template( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_deidentify_template( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDeidentifyTemplateRequest, + dict, +]) +def test_get_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + client.get_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_deidentify_template_async_from_dict(): + await test_get_deidentify_template_async(request_type=dict) + + +def test_get_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDeidentifyTemplatesRequest, + dict, +]) +def test_list_deidentify_templates(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_deidentify_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_deidentify_templates),
+            '__call__') as call:
+        client.list_deidentify_templates()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListDeidentifyTemplatesRequest()
+
+@pytest.mark.asyncio
+async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_deidentify_templates),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_deidentify_templates(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListDeidentifyTemplatesRequest()
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_from_dict(): + await test_list_deidentify_templates_async(request_type=dict) + + +def test_list_deidentify_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = dlp.ListDeidentifyTemplatesResponse() + client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) + await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_deidentify_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deidentify_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_deidentify_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_deidentify_templates(
+            dlp.ListDeidentifyTemplatesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_deidentify_templates_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_deidentify_templates),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.ListDeidentifyTemplatesResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_deidentify_templates(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_deidentify_templates_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_deidentify_templates(
+            dlp.ListDeidentifyTemplatesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_deidentify_templates_pager(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_deidentify_templates),
+            '__call__') as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_deidentify_templates(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in results) +def test_list_deidentify_templates_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_deidentify_templates(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_deidentify_templates(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_deidentify_templates(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDeidentifyTemplateRequest, + dict, +]) +def test_delete_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + client.delete_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async_from_dict(): + await test_delete_deidentify_template_async(request_type=dict) + + +def test_delete_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = None + client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateJobTriggerRequest, + dict, +]) +def test_create_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + client.create_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + +@pytest.mark.asyncio +async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            status=dlp.JobTrigger.Status.HEALTHY,
+        ))
+        response = await client.create_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.CreateJobTriggerRequest()
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, dlp.JobTrigger)
+        assert response.name == 'name_value'
+        assert response.display_name == 'display_name_value'
+        assert response.description == 'description_value'
+        assert response.status == dlp.JobTrigger.Status.HEALTHY
+
+
+@pytest.mark.asyncio
+async def test_create_job_trigger_async_from_dict():
+    await test_create_job_trigger_async(request_type=dict)
+
+
+def test_create_job_trigger_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.CreateJobTriggerRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job_trigger),
+            '__call__') as call:
+        call.return_value = dlp.JobTrigger()
+        client.create_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + + +def test_create_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateJobTriggerRequest, + dict, +]) +def test_update_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + client.update_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + +@pytest.mark.asyncio +async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + response = await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_update_job_trigger_async_from_dict(): + await test_update_job_trigger_async(request_type=dict) + + +def test_update_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectJobTriggerRequest, + dict, +]) +def test_hybrid_inspect_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse( + ) + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + client.hybrid_inspect_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( + )) + response = await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async_from_dict(): + await test_hybrid_inspect_job_trigger_async(request_type=dict) + + +def test_hybrid_inspect_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_hybrid_inspect_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_hybrid_inspect_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetJobTriggerRequest, + dict, +]) +def test_get_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + client.get_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + +@pytest.mark.asyncio +async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + response = await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_get_job_trigger_async_from_dict(): + await test_get_job_trigger_async(request_type=dict) + + +def test_get_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListJobTriggersRequest, + dict, +]) +def test_list_job_triggers(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + ) + response = client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_job_triggers_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + client.list_job_triggers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + +@pytest.mark.asyncio +async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_from_dict(): + await test_list_job_triggers_async(request_type=dict) + + +def test_list_job_triggers_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = dlp.ListJobTriggersResponse() + client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_job_triggers_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_job_triggers_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.ListJobTriggersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_job_triggers_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_job_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_job_triggers_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_job_triggers(
+            dlp.ListJobTriggersRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_job_triggers_pager(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_triggers),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[],
+                next_page_token='def',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_job_triggers(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, dlp.JobTrigger)
+                   for i in results)
+def test_list_job_triggers_pages(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_triggers),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[],
+                next_page_token='def',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_job_triggers(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_job_triggers_async_pager():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_triggers),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[],
+                next_page_token='def',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_job_triggers(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dlp.JobTrigger)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_job_triggers_async_pages():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_triggers),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_job_triggers(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteJobTriggerRequest, + dict, +]) +def test_delete_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + client.delete_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + +@pytest.mark.asyncio +async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_trigger_async_from_dict(): + await test_delete_job_trigger_async(request_type=dict) + + +def test_delete_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = None + client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ActivateJobTriggerRequest, + dict, +]) +def test_activate_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_activate_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + client.activate_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + +@pytest.mark.asyncio +async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_activate_job_trigger_async_from_dict(): + await test_activate_job_trigger_async(request_type=dict) + + +def test_activate_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_activate_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDlpJobRequest, + dict, +]) +def test_create_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_create_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + client.create_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + +@pytest.mark.asyncio +async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_create_dlp_job_async_from_dict(): + await test_create_dlp_job_async(request_type=dict) + + +def test_create_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_dlp_job( + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) + + +def test_create_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_dlp_job( + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDlpJobsRequest, + dict, +]) +def test_list_dlp_jobs(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_dlp_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + client.list_dlp_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDlpJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_from_dict(): + await test_list_dlp_jobs_async(request_type=dict) + + +def test_list_dlp_jobs_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value = dlp.ListDlpJobsResponse() + client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) + await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_dlp_jobs_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_dlp_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_dlp_jobs_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_dlp_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + + +def test_list_dlp_jobs_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_dlp_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in results) +def test_list_dlp_jobs_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_dlp_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_dlp_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_dlp_jobs(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.GetDlpJobRequest, + dict, +]) +def test_get_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_get_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + client.get_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + +@pytest.mark.asyncio +async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_get_dlp_job_async_from_dict(): + await test_get_dlp_job_async(request_type=dict) + + +def test_get_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDlpJobRequest, + dict, +]) +def test_delete_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + client.delete_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + +@pytest.mark.asyncio +async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dlp_job_async_from_dict(): + await test_delete_dlp_job_async(request_type=dict) + + +def test_delete_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value = None + client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CancelDlpJobRequest, + dict, +]) +def test_cancel_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + client.cancel_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async_from_dict(): + await test_cancel_dlp_job_async(request_type=dict) + + +def test_cancel_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = None + client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateStoredInfoTypeRequest, + dict, +]) +def test_create_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_create_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + client.create_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_stored_info_type_async_from_dict(): + await test_create_stored_info_type_async(request_type=dict) + + +def test_create_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_stored_info_type( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + + +def test_create_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_stored_info_type( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateStoredInfoTypeRequest, + dict, +]) +def test_update_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_update_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        client.update_stored_info_type()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateStoredInfoTypeRequest()
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType(
+            name='name_value',
+        ))
+        response = await client.update_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateStoredInfoTypeRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.StoredInfoType)
+    assert response.name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_async_from_dict():
+    await test_update_stored_info_type_async(request_type=dict)
+
+
+def test_update_stored_info_type_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.UpdateStoredInfoTypeRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = dlp.StoredInfoType()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_stored_info_type(
+            name='name_value',
+            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+        arg = args[0].config
+        mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_stored_info_type_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_stored_info_type(
+            dlp.UpdateStoredInfoTypeRequest(),
+            name='name_value',
+            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call. The stub is
+        # async, so the response is wrapped in an awaitable fake call.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.update_stored_info_type( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetStoredInfoTypeRequest, + dict, +]) +def test_get_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = dlp.StoredInfoType(
+            name='name_value',
+        )
+        response = client.get_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.GetStoredInfoTypeRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.StoredInfoType)
+    assert response.name == 'name_value'
+
+
+def test_get_stored_info_type_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_stored_info_type),
+            '__call__') as call:
+        client.get_stored_info_type()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.GetStoredInfoTypeRequest()
+
+@pytest.mark.asyncio
+async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType(
+            name='name_value',
+        ))
+        response = await client.get_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_stored_info_type_async_from_dict(): + await test_get_stored_info_type_async(request_type=dict) + + +def test_get_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListStoredInfoTypesRequest, + dict, +]) +def test_list_stored_info_types(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListStoredInfoTypesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListStoredInfoTypesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_stored_info_types_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        client.list_stored_info_types()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListStoredInfoTypesRequest()
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_stored_info_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_from_dict(): + await test_list_stored_info_types_async(request_type=dict) + + +def test_list_stored_info_types_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value = dlp.ListStoredInfoTypesResponse() + client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_stored_info_types_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) + await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_stored_info_types_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_stored_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_stored_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_stored_info_types(
+            dlp.ListStoredInfoTypesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call. The stub is
+        # async, so the response is wrapped in an awaitable fake call.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_stored_info_types(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_stored_info_types(
+            dlp.ListStoredInfoTypesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_stored_info_types_pager(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_stored_info_types(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in results) +def test_list_stored_info_types_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + pages = list(client.list_stored_info_types(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_stored_info_types(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_stored_info_types(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteStoredInfoTypeRequest, + dict, +]) +def test_delete_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + client.delete_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async_from_dict(): + await test_delete_stored_info_type_async(request_type=dict) + + +def test_delete_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = None + client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectDlpJobRequest, + dict, +]) +def test_hybrid_inspect_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse( + ) + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + client.hybrid_inspect_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( + )) + response = await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async_from_dict(): + await test_hybrid_inspect_dlp_job_async(request_type=dict) + + +def test_hybrid_inspect_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.HybridInspectDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_hybrid_inspect_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_hybrid_inspect_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.FinishDlpJobRequest, + dict, +]) +def test_finish_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_finish_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + client.finish_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + +@pytest.mark.asyncio +async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_finish_dlp_job_async_from_dict(): + await test_finish_dlp_job_async(request_type=dict) + + +def test_finish_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = None + client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_finish_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.InspectContentRequest, + dict, +]) +def test_inspect_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.inspect_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_inspect_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_inspect_content") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectContentResponse.to_json(dlp.InspectContentResponse()) + + request = dlp.InspectContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectContentResponse() + + client.inspect_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + 
pre.assert_called_once() + post.assert_called_once() + + +def test_inspect_content_rest_bad_request(transport: str = 'rest', request_type=dlp.InspectContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.inspect_content(request) + + +def test_inspect_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.RedactImageRequest, + dict, +]) +def test_redact_image_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.RedactImageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.redact_image(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_redact_image_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_redact_image") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.RedactImageResponse.to_json(dlp.RedactImageResponse()) + + request = dlp.RedactImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value 
= request, metadata + post.return_value = dlp.RedactImageResponse() + + client.redact_image(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_redact_image_rest_bad_request(transport: str = 'rest', request_type=dlp.RedactImageRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.redact_image(request) + + +def test_redact_image_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeidentifyContentRequest, + dict, +]) +def test_deidentify_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.deidentify_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_deidentify_content") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyContentResponse.to_json(dlp.DeidentifyContentResponse()) + + request = dlp.DeidentifyContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyContentResponse() + + client.deidentify_content(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_deidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.DeidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deidentify_content(request) + + +def test_deidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ReidentifyContentRequest, + dict, +]) +def test_reidentify_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ReidentifyContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.reidentify_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_rest_required_fields(request_type=dlp.ReidentifyContentRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ReidentifyContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.reidentify_content(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_reidentify_content_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.reidentify_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_reidentify_content") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ReidentifyContentResponse.to_json(dlp.ReidentifyContentResponse()) + + request = dlp.ReidentifyContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ReidentifyContentResponse() + + client.reidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.ReidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reidentify_content(request) + + +def test_reidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInfoTypesRequest, + dict, +]) +def test_list_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_info_types(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_info_types") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListInfoTypesResponse.to_json(dlp.ListInfoTypesResponse()) + + request = dlp.ListInfoTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInfoTypesResponse() + + client.list_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInfoTypesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_info_types(request) + + +def test_list_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/infoTypes" % client.transport._host, args[1]) + + +def test_list_info_types_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +def test_list_info_types_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateInspectTemplateRequest, + dict, +]) +def test_create_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_inspect_template_rest_required_fields(request_type=dlp.CreateInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "inspectTemplate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_inspect_template") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = dlp.CreateInspectTemplateRequest.pb(dlp.CreateInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.CreateInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.create_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_inspect_template(request) + + +def test_create_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) + + +def test_create_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +def test_create_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateInspectTemplateRequest, + dict, +]) +def test_update_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_inspect_template_rest_required_fields(request_type=dlp.UpdateInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_inspect_template") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = dlp.UpdateInspectTemplateRequest.pb(dlp.UpdateInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.UpdateInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.update_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_inspect_template(request) + + +def test_update_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_update_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetInspectTemplateRequest, + dict, +]) +def test_get_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_inspect_template_rest_required_fields(request_type=dlp.GetInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_inspect_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.GetInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.get_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_inspect_template(request) + + +def test_get_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_get_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + + +def test_get_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInspectTemplatesRequest, + dict, +]) +def test_list_inspect_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_inspect_templates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_inspect_templates_rest_required_fields(request_type=dlp.ListInspectTemplatesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_inspect_templates(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_inspect_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_inspect_templates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_inspect_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_inspect_templates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListInspectTemplatesRequest.pb(dlp.ListInspectTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListInspectTemplatesResponse.to_json(dlp.ListInspectTemplatesResponse()) + + request = dlp.ListInspectTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInspectTemplatesResponse() + + client.list_inspect_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_inspect_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInspectTemplatesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_inspect_templates(request) + + +def test_list_inspect_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_inspect_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) + + +def test_list_inspect_templates_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_inspect_templates_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_inspect_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.InspectTemplate) + for i in results) + + pages = list(client.list_inspect_templates(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + 
+ +@pytest.mark.parametrize("request_type", [ + dlp.DeleteInspectTemplateRequest, + dict, +]) +def test_delete_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_inspect_template_rest_required_fields(request_type=dlp.DeleteInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_inspect_template") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteInspectTemplateRequest.pb(dlp.DeleteInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + + client.delete_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_inspect_template(request) + + +def test_delete_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_delete_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + + +def test_delete_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDeidentifyTemplateRequest, + dict, +]) +def test_create_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_deidentify_template_rest_required_fields(request_type=dlp.CreateDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default 
values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "deidentifyTemplate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_create_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateDeidentifyTemplateRequest.pb(dlp.CreateDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + + request = dlp.CreateDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.create_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deidentify_template(request) + + +def test_create_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) + + +def test_create_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + +def test_create_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDeidentifyTemplateRequest, + dict, +]) +def test_update_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_deidentify_template_rest_required_fields(request_type=dlp.UpdateDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), 
+ interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.UpdateDeidentifyTemplateRequest.pb(dlp.UpdateDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + + request = dlp.UpdateDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.update_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deidentify_template(request) + + +def test_update_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_update_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDeidentifyTemplateRequest, + dict, +]) +def test_get_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_deidentify_template_rest_required_fields(request_type=dlp.GetDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetDeidentifyTemplateRequest.pb(dlp.GetDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + + request = dlp.GetDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.get_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deidentify_template(request) + + +def test_get_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_get_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + + +def test_get_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDeidentifyTemplatesRequest, + dict, +]) +def test_list_deidentify_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_deidentify_templates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_deidentify_templates_rest_required_fields(request_type=dlp.ListDeidentifyTemplatesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_deidentify_templates(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_deidentify_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_deidentify_templates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deidentify_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListDeidentifyTemplatesRequest.pb(dlp.ListDeidentifyTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListDeidentifyTemplatesResponse.to_json(dlp.ListDeidentifyTemplatesResponse()) + + request = dlp.ListDeidentifyTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDeidentifyTemplatesResponse() + + client.list_deidentify_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_deidentify_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDeidentifyTemplatesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deidentify_templates(request) + + +def test_list_deidentify_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_deidentify_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) + + +def test_list_deidentify_templates_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_deidentify_templates_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_deidentify_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in results) + + pages = list(client.list_deidentify_templates(request=sample_request).pages) + for page_, token in zip(pages, 
['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDeidentifyTemplateRequest, + dict, +]) +def test_delete_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_deidentify_template(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_deidentify_template_rest_required_fields(request_type=dlp.DeleteDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDeidentifyTemplateRequest.pb(dlp.DeleteDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + + client.delete_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deidentify_template(request) + + +def test_delete_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_delete_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +def test_delete_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateJobTriggerRequest, + dict, +]) +def test_create_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_rest_required_fields(request_type=dlp.CreateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "jobTrigger", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.CreateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.create_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job_trigger(request) + + +def test_create_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) + + +def test_create_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +def test_create_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateJobTriggerRequest, + dict, +]) +def test_update_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_rest_required_fields(request_type=dlp.UpdateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.UpdateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.update_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job_trigger(request) + + +def test_update_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_update_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectJobTriggerRequest, + dict, +]) +def test_hybrid_inspect_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_rest_required_fields(request_type=dlp.HybridInspectJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.hybrid_inspect_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectJobTriggerRequest.pb(dlp.HybridInspectJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) + + request = dlp.HybridInspectJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_job_trigger(request) + + +def test_hybrid_inspect_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.hybrid_inspect_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" % client.transport._host, args[1]) + + +def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +def test_hybrid_inspect_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetJobTriggerRequest, + dict, +]) +def test_get_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.GetJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.get_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.GetJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_trigger(request) + + +def test_get_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_get_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + + +def test_get_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListJobTriggersRequest, + dict, +]) +def test_list_job_triggers_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_job_triggers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_job_triggers_rest_required_fields(request_type=dlp.ListJobTriggersRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_job_triggers(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_job_triggers_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_job_triggers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_job_triggers_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_job_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListJobTriggersResponse.to_json(dlp.ListJobTriggersResponse()) + + request = dlp.ListJobTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListJobTriggersResponse() + + client.list_job_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_job_triggers_rest_bad_request(transport: str = 'rest', request_type=dlp.ListJobTriggersRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_job_triggers(request) + + +def test_list_job_triggers_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListJobTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_job_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) + + +def test_list_job_triggers_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + + +def test_list_job_triggers_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_job_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in results) + + pages = list(client.list_job_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteJobTriggerRequest, + dict, +]) +def test_delete_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_job_trigger(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_trigger_rest_required_fields(request_type=dlp.DeleteJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_job_trigger") as pre: + pre.assert_not_called() + pb_message = 
dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_job_trigger(request) + + +def test_delete_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_delete_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + + +def test_delete_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ActivateJobTriggerRequest, + dict, +]) +def test_activate_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.activate_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_activate_job_trigger_rest_required_fields(request_type=dlp.ActivateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.activate_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_activate_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_activate_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_activate_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.ActivateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.activate_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_activate_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.ActivateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.activate_job_trigger(request) + + +def test_activate_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDlpJobRequest, + dict, +]) +def test_create_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.CreateDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.create_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dlp_job(request) + + +def test_create_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) + + +def test_create_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + +def test_create_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDlpJobsRequest, + dict, +]) +def test_list_dlp_jobs_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_dlp_jobs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_dlp_jobs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_dlp_jobs_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_dlp_jobs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dlp_jobs_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListDlpJobsResponse.to_json(dlp.ListDlpJobsResponse()) + + request = dlp.ListDlpJobsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDlpJobsResponse() + + client.list_dlp_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_dlp_jobs_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDlpJobsRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dlp_jobs(request) + + +def test_list_dlp_jobs_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListDlpJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_dlp_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) + + +def test_list_dlp_jobs_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + + +def test_list_dlp_jobs_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_dlp_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in results) + + pages = list(client.list_dlp_jobs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDlpJobRequest, + dict, +]) +def test_get_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dlp_job_rest_interceptors(null_interceptor): + 
transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.GetDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.get_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dlp_job(request) + + +def test_get_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) + + +def test_get_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + + +def test_get_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDlpJobRequest, + dict, +]) +def test_delete_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dlp_job(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dlp_job(request) + + +def test_delete_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) + + +def test_delete_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + + +def test_delete_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CancelDlpJobRequest, + dict, +]) +def test_cancel_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.cancel_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_cancel_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.cancel_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.CancelDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.cancel_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_cancel_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CancelDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_dlp_job(request) + + +def test_cancel_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateStoredInfoTypeRequest, + dict, +]) +def test_create_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_create_stored_info_type_rest_required_fields(request_type=dlp.CreateStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = dlp.StoredInfoType()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.StoredInfoType.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_stored_info_type(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_stored_info_type_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_stored_info_type._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", "config", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_stored_info_type_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(),
+        )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type") as post, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_stored_info_type") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = dlp.CreateStoredInfoTypeRequest.pb(dlp.CreateStoredInfoTypeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType())
+
+        request = dlp.CreateStoredInfoTypeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = dlp.StoredInfoType()
+
+        client.create_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateStoredInfoTypeRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'organizations/sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_stored_info_type(request) + + +def test_create_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) + + +def test_create_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + +def test_create_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateStoredInfoTypeRequest, + dict, +]) +def test_update_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_update_stored_info_type_rest_required_fields(request_type=dlp.UpdateStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.StoredInfoType.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_stored_info_type(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_stored_info_type_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_stored_info_type._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_stored_info_type_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(),
+        )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type") as post, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_stored_info_type") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = dlp.UpdateStoredInfoTypeRequest.pb(dlp.UpdateStoredInfoTypeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType())
+
+        request = dlp.UpdateStoredInfoTypeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = dlp.StoredInfoType()
+
+        client.update_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateStoredInfoTypeRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_stored_info_type(request)
+
+
+def test_update_stored_info_type_rest_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_update_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetStoredInfoTypeRequest, + dict, +]) +def test_get_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_get_stored_info_type_rest_required_fields(request_type=dlp.GetStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.StoredInfoType.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_stored_info_type(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_stored_info_type_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_stored_info_type._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_stored_info_type_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(),
+        )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type") as post, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_stored_info_type") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType())
+
+        request = dlp.GetStoredInfoTypeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = dlp.StoredInfoType()
+
+        client.get_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.GetStoredInfoTypeRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_stored_info_type(request)
+
+
+def test_get_stored_info_type_rest_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_get_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + + +def test_get_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListStoredInfoTypesRequest, + dict, +]) +def test_list_stored_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_stored_info_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_stored_info_types_rest_required_fields(request_type=dlp.ListStoredInfoTypesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list_stored_info_types(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_stored_info_types_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_stored_info_types._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_stored_info_types_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(),
+        )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types") as post, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_stored_info_types") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = dlp.ListStoredInfoTypesResponse.to_json(dlp.ListStoredInfoTypesResponse())
+
+        request = dlp.ListStoredInfoTypesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = dlp.ListStoredInfoTypesResponse()
+
+        client.list_stored_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_stored_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListStoredInfoTypesRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'organizations/sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_stored_info_types(request)
+
+
+def test_list_stored_info_types_rest_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = dlp.ListStoredInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_stored_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) + + +def test_list_stored_info_types_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent='parent_value', + ) + + +def test_list_stored_info_types_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_stored_info_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in results) + + pages = list(client.list_stored_info_types(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteStoredInfoTypeRequest, + dict, +]) +def test_delete_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_stored_info_type_rest_required_fields(request_type=dlp.DeleteStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type") as pre: + pre.assert_not_called() + pb_message = 
dlp.DeleteStoredInfoTypeRequest.pb(dlp.DeleteStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_stored_info_type(request) + + +def test_delete_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_delete_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + + +def test_delete_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectDlpJobRequest, + dict, +]) +def test_hybrid_inspect_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_rest_required_fields(request_type=dlp.HybridInspectDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + 
+ # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.hybrid_inspect_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = 
DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) + + request = dlp.HybridInspectDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_dlp_job(request) + + +def test_hybrid_inspect_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.hybrid_inspect_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" % client.transport._host, args[1]) + + +def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + + +def test_hybrid_inspect_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.FinishDlpJobRequest, + dict, +]) +def test_finish_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.finish_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.finish_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_finish_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_finish_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_finish_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.FinishDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.finish_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_finish_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.FinishDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.finish_dlp_job(request) + + +def test_finish_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DlpServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DlpServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = DlpServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DlpServiceGrpcTransport, + ) + +def test_dlp_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_dlp_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'inspect_content', + 'redact_image', + 'deidentify_content', + 'reidentify_content', + 'list_info_types', + 'create_inspect_template', + 'update_inspect_template', + 'get_inspect_template', + 'list_inspect_templates', + 'delete_inspect_template', + 'create_deidentify_template', + 'update_deidentify_template', + 'get_deidentify_template', + 'list_deidentify_templates', + 'delete_deidentify_template', + 'create_job_trigger', + 'update_job_trigger', + 'hybrid_inspect_job_trigger', + 'get_job_trigger', + 'list_job_triggers', + 'delete_job_trigger', + 'activate_job_trigger', + 'create_dlp_job', + 'list_dlp_jobs', + 'get_dlp_job', + 'delete_dlp_job', + 'cancel_dlp_job', + 'create_stored_info_type', + 'update_stored_info_type', + 'get_stored_info_type', + 'list_stored_info_types', + 'delete_stored_info_type', + 'hybrid_inspect_dlp_job', + 'finish_dlp_job', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_dlp_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + 
+def test_dlp_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport() + adc.assert_called_once() + + +def test_dlp_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DlpServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + ], +) +def test_dlp_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, + ], +) +def test_dlp_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DlpServiceGrpcTransport, grpc_helpers), + (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_dlp_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.DlpServiceRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_dlp_service_host_no_port(transport_name): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dlp.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dlp.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_dlp_service_host_with_port(transport_name): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dlp.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dlp.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_dlp_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DlpServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DlpServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.inspect_content._session + session2 = client2.transport.inspect_content._session + assert session1 != session2 + session1 = client1.transport.redact_image._session + session2 = client2.transport.redact_image._session + assert session1 != session2 + session1 = client1.transport.deidentify_content._session + session2 = client2.transport.deidentify_content._session + assert session1 != session2 + session1 = client1.transport.reidentify_content._session + session2 = client2.transport.reidentify_content._session + assert session1 != session2 + session1 = client1.transport.list_info_types._session + session2 = client2.transport.list_info_types._session + assert session1 != session2 + session1 = client1.transport.create_inspect_template._session + session2 = client2.transport.create_inspect_template._session + assert session1 != session2 + session1 = client1.transport.update_inspect_template._session + session2 = client2.transport.update_inspect_template._session + assert session1 != session2 + session1 = client1.transport.get_inspect_template._session + session2 = client2.transport.get_inspect_template._session + assert session1 != session2 + session1 = client1.transport.list_inspect_templates._session + session2 = client2.transport.list_inspect_templates._session + assert session1 != session2 + session1 = client1.transport.delete_inspect_template._session + session2 = client2.transport.delete_inspect_template._session + assert session1 != session2 + session1 = client1.transport.create_deidentify_template._session + session2 = client2.transport.create_deidentify_template._session + assert session1 != session2 + 
session1 = client1.transport.update_deidentify_template._session + session2 = client2.transport.update_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.get_deidentify_template._session + session2 = client2.transport.get_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.list_deidentify_templates._session + session2 = client2.transport.list_deidentify_templates._session + assert session1 != session2 + session1 = client1.transport.delete_deidentify_template._session + session2 = client2.transport.delete_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.create_job_trigger._session + session2 = client2.transport.create_job_trigger._session + assert session1 != session2 + session1 = client1.transport.update_job_trigger._session + session2 = client2.transport.update_job_trigger._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_job_trigger._session + session2 = client2.transport.hybrid_inspect_job_trigger._session + assert session1 != session2 + session1 = client1.transport.get_job_trigger._session + session2 = client2.transport.get_job_trigger._session + assert session1 != session2 + session1 = client1.transport.list_job_triggers._session + session2 = client2.transport.list_job_triggers._session + assert session1 != session2 + session1 = client1.transport.delete_job_trigger._session + session2 = client2.transport.delete_job_trigger._session + assert session1 != session2 + session1 = client1.transport.activate_job_trigger._session + session2 = client2.transport.activate_job_trigger._session + assert session1 != session2 + session1 = client1.transport.create_dlp_job._session + session2 = client2.transport.create_dlp_job._session + assert session1 != session2 + session1 = client1.transport.list_dlp_jobs._session + session2 = client2.transport.list_dlp_jobs._session + assert session1 != session2 + session1 = 
client1.transport.get_dlp_job._session + session2 = client2.transport.get_dlp_job._session + assert session1 != session2 + session1 = client1.transport.delete_dlp_job._session + session2 = client2.transport.delete_dlp_job._session + assert session1 != session2 + session1 = client1.transport.cancel_dlp_job._session + session2 = client2.transport.cancel_dlp_job._session + assert session1 != session2 + session1 = client1.transport.create_stored_info_type._session + session2 = client2.transport.create_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.update_stored_info_type._session + session2 = client2.transport.update_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.get_stored_info_type._session + session2 = client2.transport.get_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.list_stored_info_types._session + session2 = client2.transport.list_stored_info_types._session + assert session1 != session2 + session1 = client1.transport.delete_stored_info_type._session + session2 = client2.transport.delete_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_dlp_job._session + session2 = client2.transport.hybrid_inspect_dlp_job._session + assert session1 != session2 + session1 = client1.transport.finish_dlp_job._session + session2 = client2.transport.finish_dlp_job._session + assert session1 != session2 +def test_dlp_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_dlp_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DlpServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + 
credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_deidentify_template_path(): + organization = "squid" + deidentify_template = "clam" + expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) + actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) + 
assert expected == actual + + +def test_parse_deidentify_template_path(): + expected = { + "organization": "whelk", + "deidentify_template": "octopus", + } + path = DlpServiceClient.deidentify_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_deidentify_template_path(path) + assert expected == actual + +def test_dlp_content_path(): + project = "oyster" + expected = "projects/{project}/dlpContent".format(project=project, ) + actual = DlpServiceClient.dlp_content_path(project) + assert expected == actual + + +def test_parse_dlp_content_path(): + expected = { + "project": "nudibranch", + } + path = DlpServiceClient.dlp_content_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_content_path(path) + assert expected == actual + +def test_dlp_job_path(): + project = "cuttlefish" + dlp_job = "mussel" + expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) + actual = DlpServiceClient.dlp_job_path(project, dlp_job) + assert expected == actual + + +def test_parse_dlp_job_path(): + expected = { + "project": "winkle", + "dlp_job": "nautilus", + } + path = DlpServiceClient.dlp_job_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_job_path(path) + assert expected == actual + +def test_finding_path(): + project = "scallop" + location = "abalone" + finding = "squid" + expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) + actual = DlpServiceClient.finding_path(project, location, finding) + assert expected == actual + + +def test_parse_finding_path(): + expected = { + "project": "clam", + "location": "whelk", + "finding": "octopus", + } + path = DlpServiceClient.finding_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_finding_path(path) + assert expected == actual + +def test_inspect_template_path(): + organization = "oyster" + inspect_template = "nudibranch" + expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) + actual = DlpServiceClient.inspect_template_path(organization, inspect_template) + assert expected == actual + + +def test_parse_inspect_template_path(): + expected = { + "organization": "cuttlefish", + "inspect_template": "mussel", + } + path = DlpServiceClient.inspect_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_inspect_template_path(path) + assert expected == actual + +def test_job_trigger_path(): + project = "winkle" + job_trigger = "nautilus" + expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) + actual = DlpServiceClient.job_trigger_path(project, job_trigger) + assert expected == actual + + +def test_parse_job_trigger_path(): + expected = { + "project": "scallop", + "job_trigger": "abalone", + } + path = DlpServiceClient.job_trigger_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_job_trigger_path(path) + assert expected == actual + +def test_stored_info_type_path(): + organization = "squid" + stored_info_type = "clam" + expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) + actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) + assert expected == actual + + +def test_parse_stored_info_type_path(): + expected = { + "organization": "whelk", + "stored_info_type": "octopus", + } + path = DlpServiceClient.stored_info_type_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_stored_info_type_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DlpServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DlpServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = DlpServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DlpServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DlpServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DlpServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = DlpServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DlpServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DlpServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DlpServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DlpServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 40d46d60bd8b556396f6fe5fc3ffb27608e57e9b Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 8 Feb 2023 22:43:45 +0000 Subject: [PATCH 2/7] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/cloud/dlp_v2/gapic_metadata.json | 175 + .../dlp_v2/services/dlp_service/client.py | 2 + .../dlp_service/transports/__init__.py | 4 + .../services/dlp_service/transports/rest.py | 3335 +-- google/cloud/dlp_v2/types/storage.py | 8 +- owl-bot-staging/v2/.coveragerc | 13 - owl-bot-staging/v2/.flake8 | 33 - owl-bot-staging/v2/MANIFEST.in | 2 - owl-bot-staging/v2/README.rst | 49 - owl-bot-staging/v2/docs/conf.py | 376 - .../v2/docs/dlp_v2/dlp_service.rst | 10 - 
owl-bot-staging/v2/docs/dlp_v2/services.rst | 6 - owl-bot-staging/v2/docs/dlp_v2/types.rst | 6 - owl-bot-staging/v2/docs/index.rst | 7 - .../v2/google/cloud/dlp/__init__.py | 395 - .../v2/google/cloud/dlp/gapic_version.py | 16 - owl-bot-staging/v2/google/cloud/dlp/py.typed | 2 - .../v2/google/cloud/dlp_v2/__init__.py | 396 - .../google/cloud/dlp_v2/gapic_metadata.json | 538 - .../v2/google/cloud/dlp_v2/gapic_version.py | 16 - .../v2/google/cloud/dlp_v2/py.typed | 2 - .../google/cloud/dlp_v2/services/__init__.py | 15 - .../dlp_v2/services/dlp_service/__init__.py | 22 - .../services/dlp_service/async_client.py | 4142 ---- .../dlp_v2/services/dlp_service/client.py | 4267 ---- .../dlp_v2/services/dlp_service/pagers.py | 623 - .../dlp_service/transports/__init__.py | 38 - .../services/dlp_service/transports/base.py | 751 - .../services/dlp_service/transports/grpc.py | 1261 -- .../dlp_service/transports/grpc_asyncio.py | 1260 -- .../v2/google/cloud/dlp_v2/types/__init__.py | 390 - .../v2/google/cloud/dlp_v2/types/dlp.py | 8846 -------- .../v2/google/cloud/dlp_v2/types/storage.py | 1474 -- owl-bot-staging/v2/mypy.ini | 3 - owl-bot-staging/v2/noxfile.py | 184 - ..._dlp_service_activate_job_trigger_async.py | 52 - ...d_dlp_service_activate_job_trigger_sync.py | 52 - ...erated_dlp_service_cancel_dlp_job_async.py | 50 - ...nerated_dlp_service_cancel_dlp_job_sync.py | 50 - ...ervice_create_deidentify_template_async.py | 52 - ...service_create_deidentify_template_sync.py | 52 - ...erated_dlp_service_create_dlp_job_async.py | 52 - ...nerated_dlp_service_create_dlp_job_sync.py | 52 - ...p_service_create_inspect_template_async.py | 52 - ...lp_service_create_inspect_template_sync.py | 52 - ...ed_dlp_service_create_job_trigger_async.py | 56 - ...ted_dlp_service_create_job_trigger_sync.py | 56 - ...p_service_create_stored_info_type_async.py | 52 - ...lp_service_create_stored_info_type_sync.py | 52 - ...ed_dlp_service_deidentify_content_async.py | 51 - 
...ted_dlp_service_deidentify_content_sync.py | 51 - ...ervice_delete_deidentify_template_async.py | 50 - ...service_delete_deidentify_template_sync.py | 50 - ...erated_dlp_service_delete_dlp_job_async.py | 50 - ...nerated_dlp_service_delete_dlp_job_sync.py | 50 - ...p_service_delete_inspect_template_async.py | 50 - ...lp_service_delete_inspect_template_sync.py | 50 - ...ed_dlp_service_delete_job_trigger_async.py | 50 - ...ted_dlp_service_delete_job_trigger_sync.py | 50 - ...p_service_delete_stored_info_type_async.py | 50 - ...lp_service_delete_stored_info_type_sync.py | 50 - ...erated_dlp_service_finish_dlp_job_async.py | 50 - ...nerated_dlp_service_finish_dlp_job_sync.py | 50 - ...p_service_get_deidentify_template_async.py | 52 - ...lp_service_get_deidentify_template_sync.py | 52 - ...generated_dlp_service_get_dlp_job_async.py | 52 - ..._generated_dlp_service_get_dlp_job_sync.py | 52 - ..._dlp_service_get_inspect_template_async.py | 52 - ...d_dlp_service_get_inspect_template_sync.py | 52 - ...rated_dlp_service_get_job_trigger_async.py | 52 - ...erated_dlp_service_get_job_trigger_sync.py | 52 - ..._dlp_service_get_stored_info_type_async.py | 52 - ...d_dlp_service_get_stored_info_type_sync.py | 52 - ...lp_service_hybrid_inspect_dlp_job_async.py | 52 - ...dlp_service_hybrid_inspect_dlp_job_sync.py | 52 - ...ervice_hybrid_inspect_job_trigger_async.py | 52 - ...service_hybrid_inspect_job_trigger_sync.py | 52 - ...rated_dlp_service_inspect_content_async.py | 51 - ...erated_dlp_service_inspect_content_sync.py | 51 - ...service_list_deidentify_templates_async.py | 53 - ..._service_list_deidentify_templates_sync.py | 53 - ...nerated_dlp_service_list_dlp_jobs_async.py | 53 - ...enerated_dlp_service_list_dlp_jobs_sync.py | 53 - ...rated_dlp_service_list_info_types_async.py | 51 - ...erated_dlp_service_list_info_types_sync.py | 51 - ...lp_service_list_inspect_templates_async.py | 53 - ...dlp_service_list_inspect_templates_sync.py | 53 - 
...ted_dlp_service_list_job_triggers_async.py | 53 - ...ated_dlp_service_list_job_triggers_sync.py | 53 - ...lp_service_list_stored_info_types_async.py | 53 - ...dlp_service_list_stored_info_types_sync.py | 53 - ...enerated_dlp_service_redact_image_async.py | 51 - ...generated_dlp_service_redact_image_sync.py | 51 - ...ed_dlp_service_reidentify_content_async.py | 52 - ...ted_dlp_service_reidentify_content_sync.py | 52 - ...ervice_update_deidentify_template_async.py | 52 - ...service_update_deidentify_template_sync.py | 52 - ...p_service_update_inspect_template_async.py | 52 - ...lp_service_update_inspect_template_sync.py | 52 - ...ed_dlp_service_update_job_trigger_async.py | 52 - ...ted_dlp_service_update_job_trigger_sync.py | 52 - ...p_service_update_stored_info_type_async.py | 52 - ...lp_service_update_stored_info_type_sync.py | 52 - ...nippet_metadata_google.privacy.dlp.v2.json | 5503 ----- .../v2/scripts/fixup_dlp_v2_keywords.py | 209 - owl-bot-staging/v2/setup.py | 90 - .../v2/testing/constraints-3.10.txt | 6 - .../v2/testing/constraints-3.11.txt | 6 - .../v2/testing/constraints-3.12.txt | 6 - .../v2/testing/constraints-3.7.txt | 9 - .../v2/testing/constraints-3.8.txt | 6 - .../v2/testing/constraints-3.9.txt | 6 - owl-bot-staging/v2/tests/__init__.py | 16 - owl-bot-staging/v2/tests/unit/__init__.py | 16 - .../v2/tests/unit/gapic/__init__.py | 16 - .../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 - .../unit/gapic/dlp_v2/test_dlp_service.py | 17403 ---------------- tests/unit/gapic/dlp_v2/test_dlp_service.py | 9101 +++++++- 118 files changed, 11080 insertions(+), 53514 deletions(-) rename {owl-bot-staging/v2/google => google}/cloud/dlp_v2/services/dlp_service/transports/rest.py (61%) delete mode 100644 owl-bot-staging/v2/.coveragerc delete mode 100644 owl-bot-staging/v2/.flake8 delete mode 100644 owl-bot-staging/v2/MANIFEST.in delete mode 100644 owl-bot-staging/v2/README.rst delete mode 100644 owl-bot-staging/v2/docs/conf.py delete mode 100644 
owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/services.rst delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/types.rst delete mode 100644 owl-bot-staging/v2/docs/index.rst delete mode 100644 owl-bot-staging/v2/google/cloud/dlp/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py delete mode 100644 owl-bot-staging/v2/mypy.ini delete mode 100644 owl-bot-staging/v2/noxfile.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json delete mode 100644 owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py delete mode 100644 owl-bot-staging/v2/setup.py delete mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v2/tests/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/google/cloud/dlp_v2/gapic_metadata.json b/google/cloud/dlp_v2/gapic_metadata.json index df73928b..634002d4 100644 --- a/google/cloud/dlp_v2/gapic_metadata.json +++ b/google/cloud/dlp_v2/gapic_metadata.json @@ -356,6 +356,181 @@ ] } } + }, + "rest": { + "libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + 
"methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + 
"update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } } } } diff --git a/google/cloud/dlp_v2/services/dlp_service/client.py b/google/cloud/dlp_v2/services/dlp_service/client.py index d9f2f134..38635b92 100644 --- a/google/cloud/dlp_v2/services/dlp_service/client.py +++ b/google/cloud/dlp_v2/services/dlp_service/client.py @@ -55,6 +55,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, DlpServiceTransport from .transports.grpc import DlpServiceGrpcTransport from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .transports.rest import DlpServiceRestTransport class DlpServiceClientMeta(type): @@ -68,6 +69,7 @@ class DlpServiceClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] _transport_registry["grpc"] = DlpServiceGrpcTransport _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DlpServiceRestTransport def get_transport_class( cls, diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py index b781126f..a7d8161e 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py @@ -19,14 +19,18 @@ from .base import DlpServiceTransport from .grpc import DlpServiceGrpcTransport from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .rest import DlpServiceRestInterceptor, DlpServiceRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] _transport_registry["grpc"] = DlpServiceGrpcTransport _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DlpServiceRestTransport __all__ = ( "DlpServiceTransport", "DlpServiceGrpcTransport", "DlpServiceGrpcAsyncIOTransport", + "DlpServiceRestTransport", + "DlpServiceRestInterceptor", ) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/google/cloud/dlp_v2/services/dlp_service/transports/rest.py similarity index 61% rename from owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py rename to google/cloud/dlp_v2/services/dlp_service/transports/rest.py index fffd577e..b6bfd3c1 100644 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -14,24 +14,21 @@ # limitations under the License. # -from google.auth.transport.requests import AuthorizedSession # type: ignore +import dataclasses import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore from google.protobuf import json_format 
+import grpc # type: ignore from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -39,11 +36,12 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.cloud.dlp_v2.types import dlp from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from google.cloud.dlp_v2.types import dlp +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DlpServiceTransport DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -316,7 +314,12 @@ def post_update_stored_info_type(self, response): """ - def pre_activate_job_trigger(self, request: dlp.ActivateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, str]]]: + + def pre_activate_job_trigger( + self, + request: dlp.ActivateJobTriggerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for activate_job_trigger Override in a subclass to manipulate the request or metadata @@ -332,7 +335,10 @@ def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: it is returned to user code. 
""" return response - def pre_cancel_dlp_job(self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, str]]]: + + def pre_cancel_dlp_job( + self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_dlp_job Override in a subclass to manipulate the request or metadata @@ -340,7 +346,11 @@ def pre_cancel_dlp_job(self, request: dlp.CancelDlpJobRequest, metadata: Sequenc """ return request, metadata - def pre_create_deidentify_template(self, request: dlp.CreateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + def pre_create_deidentify_template( + self, + request: dlp.CreateDeidentifyTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_deidentify_template Override in a subclass to manipulate the request or metadata @@ -348,7 +358,9 @@ def pre_create_deidentify_template(self, request: dlp.CreateDeidentifyTemplateRe """ return request, metadata - def post_create_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + def post_create_deidentify_template( + self, response: dlp.DeidentifyTemplate + ) -> dlp.DeidentifyTemplate: """Post-rpc interceptor for create_deidentify_template Override in a subclass to manipulate the response @@ -356,7 +368,10 @@ def post_create_deidentify_template(self, response: dlp.DeidentifyTemplate) -> d it is returned to user code. 
""" return response - def pre_create_dlp_job(self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, str]]]: + + def pre_create_dlp_job( + self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_dlp_job Override in a subclass to manipulate the request or metadata @@ -372,7 +387,12 @@ def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: it is returned to user code. """ return response - def pre_create_inspect_template(self, request: dlp.CreateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + + def pre_create_inspect_template( + self, + request: dlp.CreateInspectTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_inspect_template Override in a subclass to manipulate the request or metadata @@ -380,7 +400,9 @@ def pre_create_inspect_template(self, request: dlp.CreateInspectTemplateRequest, """ return request, metadata - def post_create_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + def post_create_inspect_template( + self, response: dlp.InspectTemplate + ) -> dlp.InspectTemplate: """Post-rpc interceptor for create_inspect_template Override in a subclass to manipulate the response @@ -388,7 +410,10 @@ def post_create_inspect_template(self, response: dlp.InspectTemplate) -> dlp.Ins it is returned to user code. 
""" return response - def pre_create_job_trigger(self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, str]]]: + + def pre_create_job_trigger( + self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_job_trigger Override in a subclass to manipulate the request or metadata @@ -404,7 +429,12 @@ def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: it is returned to user code. """ return response - def pre_create_stored_info_type(self, request: dlp.CreateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + + def pre_create_stored_info_type( + self, + request: dlp.CreateStoredInfoTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_stored_info_type Override in a subclass to manipulate the request or metadata @@ -412,7 +442,9 @@ def pre_create_stored_info_type(self, request: dlp.CreateStoredInfoTypeRequest, """ return request, metadata - def post_create_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + def post_create_stored_info_type( + self, response: dlp.StoredInfoType + ) -> dlp.StoredInfoType: """Post-rpc interceptor for create_stored_info_type Override in a subclass to manipulate the response @@ -420,7 +452,10 @@ def post_create_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.Stor it is returned to user code. 
""" return response - def pre_deidentify_content(self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, str]]]: + + def pre_deidentify_content( + self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for deidentify_content Override in a subclass to manipulate the request or metadata @@ -428,7 +463,9 @@ def pre_deidentify_content(self, request: dlp.DeidentifyContentRequest, metadata """ return request, metadata - def post_deidentify_content(self, response: dlp.DeidentifyContentResponse) -> dlp.DeidentifyContentResponse: + def post_deidentify_content( + self, response: dlp.DeidentifyContentResponse + ) -> dlp.DeidentifyContentResponse: """Post-rpc interceptor for deidentify_content Override in a subclass to manipulate the response @@ -436,7 +473,12 @@ def post_deidentify_content(self, response: dlp.DeidentifyContentResponse) -> dl it is returned to user code. 
""" return response - def pre_delete_deidentify_template(self, request: dlp.DeleteDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + + def pre_delete_deidentify_template( + self, + request: dlp.DeleteDeidentifyTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_deidentify_template Override in a subclass to manipulate the request or metadata @@ -444,7 +486,9 @@ def pre_delete_deidentify_template(self, request: dlp.DeleteDeidentifyTemplateRe """ return request, metadata - def pre_delete_dlp_job(self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, str]]]: + def pre_delete_dlp_job( + self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_dlp_job Override in a subclass to manipulate the request or metadata @@ -452,7 +496,11 @@ def pre_delete_dlp_job(self, request: dlp.DeleteDlpJobRequest, metadata: Sequenc """ return request, metadata - def pre_delete_inspect_template(self, request: dlp.DeleteInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, str]]]: + def pre_delete_inspect_template( + self, + request: dlp.DeleteInspectTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_inspect_template Override in a subclass to manipulate the request or metadata @@ -460,7 +508,9 @@ def pre_delete_inspect_template(self, request: dlp.DeleteInspectTemplateRequest, """ return request, metadata - def pre_delete_job_trigger(self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, str]]]: + def pre_delete_job_trigger( + self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_job_trigger Override in a subclass to manipulate the request or metadata @@ -468,7 +518,11 @@ def pre_delete_job_trigger(self, request: dlp.DeleteJobTriggerRequest, metadata: """ return request, metadata - def pre_delete_stored_info_type(self, request: dlp.DeleteStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + def pre_delete_stored_info_type( + self, + request: dlp.DeleteStoredInfoTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_stored_info_type Override in a subclass to manipulate the request or metadata @@ -476,7 +530,9 @@ def pre_delete_stored_info_type(self, request: dlp.DeleteStoredInfoTypeRequest, """ return request, metadata - def pre_finish_dlp_job(self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, str]]]: + def pre_finish_dlp_job( + self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for finish_dlp_job Override in a subclass to manipulate the request or metadata @@ -484,7 +540,11 @@ def pre_finish_dlp_job(self, request: dlp.FinishDlpJobRequest, metadata: Sequenc """ return request, metadata - def pre_get_deidentify_template(self, request: dlp.GetDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + def pre_get_deidentify_template( + self, + request: dlp.GetDeidentifyTemplateRequest, + metadata: Sequence[Tuple[str, str]], 
+ ) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_deidentify_template Override in a subclass to manipulate the request or metadata @@ -492,7 +552,9 @@ def pre_get_deidentify_template(self, request: dlp.GetDeidentifyTemplateRequest, """ return request, metadata - def post_get_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + def post_get_deidentify_template( + self, response: dlp.DeidentifyTemplate + ) -> dlp.DeidentifyTemplate: """Post-rpc interceptor for get_deidentify_template Override in a subclass to manipulate the response @@ -500,7 +562,10 @@ def post_get_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp. it is returned to user code. """ return response - def pre_get_dlp_job(self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, str]]]: + + def pre_get_dlp_job( + self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_dlp_job Override in a subclass to manipulate the request or metadata @@ -516,7 +581,12 @@ def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: it is returned to user code. 
""" return response - def pre_get_inspect_template(self, request: dlp.GetInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, str]]]: + + def pre_get_inspect_template( + self, + request: dlp.GetInspectTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_inspect_template Override in a subclass to manipulate the request or metadata @@ -524,7 +594,9 @@ def pre_get_inspect_template(self, request: dlp.GetInspectTemplateRequest, metad """ return request, metadata - def post_get_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + def post_get_inspect_template( + self, response: dlp.InspectTemplate + ) -> dlp.InspectTemplate: """Post-rpc interceptor for get_inspect_template Override in a subclass to manipulate the response @@ -532,7 +604,10 @@ def post_get_inspect_template(self, response: dlp.InspectTemplate) -> dlp.Inspec it is returned to user code. """ return response - def pre_get_job_trigger(self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, str]]]: + + def pre_get_job_trigger( + self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_job_trigger Override in a subclass to manipulate the request or metadata @@ -548,7 +623,10 @@ def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: it is returned to user code. 
""" return response - def pre_get_stored_info_type(self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + + def pre_get_stored_info_type( + self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_stored_info_type Override in a subclass to manipulate the request or metadata @@ -556,7 +634,9 @@ def pre_get_stored_info_type(self, request: dlp.GetStoredInfoTypeRequest, metada """ return request, metadata - def post_get_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + def post_get_stored_info_type( + self, response: dlp.StoredInfoType + ) -> dlp.StoredInfoType: """Post-rpc interceptor for get_stored_info_type Override in a subclass to manipulate the response @@ -564,7 +644,12 @@ def post_get_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredI it is returned to user code. 
""" return response - def pre_hybrid_inspect_dlp_job(self, request: dlp.HybridInspectDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, str]]]: + + def pre_hybrid_inspect_dlp_job( + self, + request: dlp.HybridInspectDlpJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for hybrid_inspect_dlp_job Override in a subclass to manipulate the request or metadata @@ -572,7 +657,9 @@ def pre_hybrid_inspect_dlp_job(self, request: dlp.HybridInspectDlpJobRequest, me """ return request, metadata - def post_hybrid_inspect_dlp_job(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + def post_hybrid_inspect_dlp_job( + self, response: dlp.HybridInspectResponse + ) -> dlp.HybridInspectResponse: """Post-rpc interceptor for hybrid_inspect_dlp_job Override in a subclass to manipulate the response @@ -580,7 +667,12 @@ def post_hybrid_inspect_dlp_job(self, response: dlp.HybridInspectResponse) -> dl it is returned to user code. 
""" return response - def pre_hybrid_inspect_job_trigger(self, request: dlp.HybridInspectJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, str]]]: + + def pre_hybrid_inspect_job_trigger( + self, + request: dlp.HybridInspectJobTriggerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for hybrid_inspect_job_trigger Override in a subclass to manipulate the request or metadata @@ -588,7 +680,9 @@ def pre_hybrid_inspect_job_trigger(self, request: dlp.HybridInspectJobTriggerReq """ return request, metadata - def post_hybrid_inspect_job_trigger(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + def post_hybrid_inspect_job_trigger( + self, response: dlp.HybridInspectResponse + ) -> dlp.HybridInspectResponse: """Post-rpc interceptor for hybrid_inspect_job_trigger Override in a subclass to manipulate the response @@ -596,7 +690,10 @@ def post_hybrid_inspect_job_trigger(self, response: dlp.HybridInspectResponse) - it is returned to user code. 
""" return response - def pre_inspect_content(self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, str]]]: + + def pre_inspect_content( + self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for inspect_content Override in a subclass to manipulate the request or metadata @@ -604,7 +701,9 @@ def pre_inspect_content(self, request: dlp.InspectContentRequest, metadata: Sequ """ return request, metadata - def post_inspect_content(self, response: dlp.InspectContentResponse) -> dlp.InspectContentResponse: + def post_inspect_content( + self, response: dlp.InspectContentResponse + ) -> dlp.InspectContentResponse: """Post-rpc interceptor for inspect_content Override in a subclass to manipulate the response @@ -612,7 +711,12 @@ def post_inspect_content(self, response: dlp.InspectContentResponse) -> dlp.Insp it is returned to user code. 
""" return response - def pre_list_deidentify_templates(self, request: dlp.ListDeidentifyTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_deidentify_templates( + self, + request: dlp.ListDeidentifyTemplatesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_deidentify_templates Override in a subclass to manipulate the request or metadata @@ -620,7 +724,9 @@ def pre_list_deidentify_templates(self, request: dlp.ListDeidentifyTemplatesRequ """ return request, metadata - def post_list_deidentify_templates(self, response: dlp.ListDeidentifyTemplatesResponse) -> dlp.ListDeidentifyTemplatesResponse: + def post_list_deidentify_templates( + self, response: dlp.ListDeidentifyTemplatesResponse + ) -> dlp.ListDeidentifyTemplatesResponse: """Post-rpc interceptor for list_deidentify_templates Override in a subclass to manipulate the response @@ -628,7 +734,10 @@ def post_list_deidentify_templates(self, response: dlp.ListDeidentifyTemplatesRe it is returned to user code. 
""" return response - def pre_list_dlp_jobs(self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_dlp_jobs( + self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_dlp_jobs Override in a subclass to manipulate the request or metadata @@ -636,7 +745,9 @@ def pre_list_dlp_jobs(self, request: dlp.ListDlpJobsRequest, metadata: Sequence[ """ return request, metadata - def post_list_dlp_jobs(self, response: dlp.ListDlpJobsResponse) -> dlp.ListDlpJobsResponse: + def post_list_dlp_jobs( + self, response: dlp.ListDlpJobsResponse + ) -> dlp.ListDlpJobsResponse: """Post-rpc interceptor for list_dlp_jobs Override in a subclass to manipulate the response @@ -644,7 +755,10 @@ def post_list_dlp_jobs(self, response: dlp.ListDlpJobsResponse) -> dlp.ListDlpJo it is returned to user code. """ return response - def pre_list_info_types(self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_info_types( + self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_info_types Override in a subclass to manipulate the request or metadata @@ -652,7 +766,9 @@ def pre_list_info_types(self, request: dlp.ListInfoTypesRequest, metadata: Seque """ return request, metadata - def post_list_info_types(self, response: dlp.ListInfoTypesResponse) -> dlp.ListInfoTypesResponse: + def post_list_info_types( + self, response: dlp.ListInfoTypesResponse + ) -> dlp.ListInfoTypesResponse: """Post-rpc interceptor for list_info_types Override in a subclass to manipulate the response @@ -660,7 +776,12 @@ def post_list_info_types(self, response: dlp.ListInfoTypesResponse) -> dlp.ListI it 
is returned to user code. """ return response - def pre_list_inspect_templates(self, request: dlp.ListInspectTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_inspect_templates( + self, + request: dlp.ListInspectTemplatesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_inspect_templates Override in a subclass to manipulate the request or metadata @@ -668,7 +789,9 @@ def pre_list_inspect_templates(self, request: dlp.ListInspectTemplatesRequest, m """ return request, metadata - def post_list_inspect_templates(self, response: dlp.ListInspectTemplatesResponse) -> dlp.ListInspectTemplatesResponse: + def post_list_inspect_templates( + self, response: dlp.ListInspectTemplatesResponse + ) -> dlp.ListInspectTemplatesResponse: """Post-rpc interceptor for list_inspect_templates Override in a subclass to manipulate the response @@ -676,7 +799,10 @@ def post_list_inspect_templates(self, response: dlp.ListInspectTemplatesResponse it is returned to user code. 
""" return response - def pre_list_job_triggers(self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, str]]]: + + def pre_list_job_triggers( + self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_job_triggers Override in a subclass to manipulate the request or metadata @@ -684,7 +810,9 @@ def pre_list_job_triggers(self, request: dlp.ListJobTriggersRequest, metadata: S """ return request, metadata - def post_list_job_triggers(self, response: dlp.ListJobTriggersResponse) -> dlp.ListJobTriggersResponse: + def post_list_job_triggers( + self, response: dlp.ListJobTriggersResponse + ) -> dlp.ListJobTriggersResponse: """Post-rpc interceptor for list_job_triggers Override in a subclass to manipulate the response @@ -692,7 +820,12 @@ def post_list_job_triggers(self, response: dlp.ListJobTriggersResponse) -> dlp.L it is returned to user code. 
""" return response - def pre_list_stored_info_types(self, request: dlp.ListStoredInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_stored_info_types( + self, + request: dlp.ListStoredInfoTypesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_stored_info_types Override in a subclass to manipulate the request or metadata @@ -700,7 +833,9 @@ def pre_list_stored_info_types(self, request: dlp.ListStoredInfoTypesRequest, me """ return request, metadata - def post_list_stored_info_types(self, response: dlp.ListStoredInfoTypesResponse) -> dlp.ListStoredInfoTypesResponse: + def post_list_stored_info_types( + self, response: dlp.ListStoredInfoTypesResponse + ) -> dlp.ListStoredInfoTypesResponse: """Post-rpc interceptor for list_stored_info_types Override in a subclass to manipulate the response @@ -708,7 +843,10 @@ def post_list_stored_info_types(self, response: dlp.ListStoredInfoTypesResponse) it is returned to user code. 
""" return response - def pre_redact_image(self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, str]]]: + + def pre_redact_image( + self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for redact_image Override in a subclass to manipulate the request or metadata @@ -716,7 +854,9 @@ def pre_redact_image(self, request: dlp.RedactImageRequest, metadata: Sequence[T """ return request, metadata - def post_redact_image(self, response: dlp.RedactImageResponse) -> dlp.RedactImageResponse: + def post_redact_image( + self, response: dlp.RedactImageResponse + ) -> dlp.RedactImageResponse: """Post-rpc interceptor for redact_image Override in a subclass to manipulate the response @@ -724,7 +864,10 @@ def post_redact_image(self, response: dlp.RedactImageResponse) -> dlp.RedactImag it is returned to user code. """ return response - def pre_reidentify_content(self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, str]]]: + + def pre_reidentify_content( + self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for reidentify_content Override in a subclass to manipulate the request or metadata @@ -732,7 +875,9 @@ def pre_reidentify_content(self, request: dlp.ReidentifyContentRequest, metadata """ return request, metadata - def post_reidentify_content(self, response: dlp.ReidentifyContentResponse) -> dlp.ReidentifyContentResponse: + def post_reidentify_content( + self, response: dlp.ReidentifyContentResponse + ) -> dlp.ReidentifyContentResponse: """Post-rpc interceptor for reidentify_content Override in a subclass to manipulate the response @@ -740,7 +885,12 @@ def post_reidentify_content(self, 
response: dlp.ReidentifyContentResponse) -> dl it is returned to user code. """ return response - def pre_update_deidentify_template(self, request: dlp.UpdateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + + def pre_update_deidentify_template( + self, + request: dlp.UpdateDeidentifyTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_deidentify_template Override in a subclass to manipulate the request or metadata @@ -748,7 +898,9 @@ def pre_update_deidentify_template(self, request: dlp.UpdateDeidentifyTemplateRe """ return request, metadata - def post_update_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + def post_update_deidentify_template( + self, response: dlp.DeidentifyTemplate + ) -> dlp.DeidentifyTemplate: """Post-rpc interceptor for update_deidentify_template Override in a subclass to manipulate the response @@ -756,7 +908,12 @@ def post_update_deidentify_template(self, response: dlp.DeidentifyTemplate) -> d it is returned to user code. 
""" return response - def pre_update_inspect_template(self, request: dlp.UpdateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + + def pre_update_inspect_template( + self, + request: dlp.UpdateInspectTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_inspect_template Override in a subclass to manipulate the request or metadata @@ -764,7 +921,9 @@ def pre_update_inspect_template(self, request: dlp.UpdateInspectTemplateRequest, """ return request, metadata - def post_update_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + def post_update_inspect_template( + self, response: dlp.InspectTemplate + ) -> dlp.InspectTemplate: """Post-rpc interceptor for update_inspect_template Override in a subclass to manipulate the response @@ -772,7 +931,10 @@ def post_update_inspect_template(self, response: dlp.InspectTemplate) -> dlp.Ins it is returned to user code. """ return response - def pre_update_job_trigger(self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, str]]]: + + def pre_update_job_trigger( + self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_job_trigger Override in a subclass to manipulate the request or metadata @@ -788,7 +950,12 @@ def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: it is returned to user code. 
""" return response - def pre_update_stored_info_type(self, request: dlp.UpdateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + + def pre_update_stored_info_type( + self, + request: dlp.UpdateStoredInfoTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_stored_info_type Override in a subclass to manipulate the request or metadata @@ -796,7 +963,9 @@ def pre_update_stored_info_type(self, request: dlp.UpdateStoredInfoTypeRequest, """ return request, metadata - def post_update_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + def post_update_stored_info_type( + self, response: dlp.StoredInfoType + ) -> dlp.StoredInfoType: """Post-rpc interceptor for update_stored_info_type Override in a subclass to manipulate the response @@ -835,20 +1004,21 @@ class DlpServiceRestTransport(DlpServiceTransport): """ - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DlpServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "dlp.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = 
DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DlpServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -887,7 +1057,9 @@ def __init__(self, *, # credentials object maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -898,10 +1070,11 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._interceptor = interceptor or DlpServiceRestInterceptor() @@ -911,19 +1084,24 @@ class _ActivateJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("ActivateJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ActivateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DlpJob: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ActivateJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + 
metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: r"""Call the activate job trigger method over HTTP. Args: @@ -944,51 +1122,56 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/jobTriggers/*}:activate', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:activate', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/jobTriggers/*}:activate", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}:activate", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_activate_job_trigger(request, metadata) + request, metadata = self._interceptor.pre_activate_job_trigger( + request, metadata + ) pb_request = dlp.ActivateJobTriggerRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = 
getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1007,19 +1190,24 @@ class _CancelDlpJob(DlpServiceRestStub): def __hash__(self): return hash("CancelDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CancelDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CancelDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): r"""Call the cancel dlp job method over HTTP. Args: @@ -1034,16 +1222,17 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/dlpJobs/*}:cancel', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/dlpJobs/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel", + "body": "*", + }, ] request, metadata = self._interceptor.pre_cancel_dlp_job(request, metadata) pb_request = dlp.CancelDlpJobRequest.pb(request) @@ -1052,33 +1241,35 @@ def __call__(self, # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1089,98 +1280,108 @@ class _CreateDeidentifyTemplate(DlpServiceRestStub): def __hash__(self): return hash("CreateDeidentifyTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyTemplate: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateDeidentifyTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: r"""Call the create deidentify - template method over HTTP. - - Args: - request (~.dlp.CreateDeidentifyTemplateRequest): - The request object. Request message for - CreateDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. + template method over HTTP. + + Args: + request (~.dlp.CreateDeidentifyTemplateRequest): + The request object. Request message for + CreateDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=organizations/*}/deidentifyTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/locations/*}/deidentifyTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*}/deidentifyTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/deidentifyTemplates", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_create_deidentify_template(request, metadata) + request, metadata = self._interceptor.pre_create_deidentify_template( + request, metadata + ) pb_request = dlp.CreateDeidentifyTemplateRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1199,19 +1400,24 @@ class _CreateDlpJob(DlpServiceRestStub): def __hash__(self): return hash("CreateDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DlpJob: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: r"""Call the create dlp job method over HTTP. 
Args: @@ -1235,16 +1441,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/dlpJobs', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/dlpJobs", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/dlpJobs", + "body": "*", + }, ] request, metadata = self._interceptor.pre_create_dlp_job(request, metadata) pb_request = dlp.CreateDlpJobRequest.pb(request) @@ -1253,33 +1460,35 @@ def __call__(self, # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1298,19 +1507,24 @@ class _CreateInspectTemplate(DlpServiceRestStub): def __hash__(self): return hash("CreateInspectTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectTemplate: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateInspectTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: r"""Call the create inspect template method over HTTP. 
Args: @@ -1336,61 +1550,66 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/inspectTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/inspectTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=organizations/*}/inspectTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/locations/*}/inspectTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*}/inspectTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/inspectTemplates", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_create_inspect_template(request, metadata) + request, metadata = self._interceptor.pre_create_inspect_template( + request, metadata + ) pb_request = dlp.CreateInspectTemplateRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + 
use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1409,19 +1628,24 @@ class _CreateJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("CreateJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.JobTrigger: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: r"""Call the create job trigger method over HTTP. 
Args: @@ -1442,56 +1666,61 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/jobTriggers', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/jobTriggers", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/jobTriggers", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/locations/*}/jobTriggers", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_create_job_trigger(request, metadata) + request, metadata = self._interceptor.pre_create_job_trigger( + request, metadata + ) pb_request = dlp.CreateJobTriggerRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + 
headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1510,19 +1739,24 @@ class _CreateStoredInfoType(DlpServiceRestStub): def __hash__(self): return hash("CreateStoredInfoType") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.StoredInfoType: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateStoredInfoTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: r"""Call the create stored info type method over HTTP. 
Args: @@ -1544,61 +1778,66 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/storedInfoTypes', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=organizations/*}/storedInfoTypes", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/locations/*}/storedInfoTypes", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*}/storedInfoTypes", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/storedInfoTypes", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_create_stored_info_type(request, metadata) + request, metadata = self._interceptor.pre_create_stored_info_type( + request, metadata + ) pb_request = dlp.CreateStoredInfoTypeRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + 
use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1617,12 +1856,14 @@ class _DeidentifyContent(DlpServiceRestStub): def __hash__(self): return hash("DeidentifyContent") - def __call__(self, - request: dlp.DeidentifyContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyContentResponse: + def __call__( + self, + request: dlp.DeidentifyContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: r"""Call the deidentify content method over HTTP. 
Args: @@ -1641,50 +1882,55 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:deidentify', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:deidentify', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/content:deidentify", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/content:deidentify", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_deidentify_content(request, metadata) + request, metadata = self._interceptor.pre_deidentify_content( + request, metadata + ) pb_request = dlp.DeidentifyContentRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case 
of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1703,77 +1949,87 @@ class _DeleteDeidentifyTemplate(DlpServiceRestStub): def __hash__(self): return hash("DeleteDeidentifyTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteDeidentifyTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): r"""Call the delete deidentify - template method over HTTP. - - Args: - request (~.dlp.DeleteDeidentifyTemplateRequest): - The request object. Request message for - DeleteDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + template method over HTTP. + + Args: + request (~.dlp.DeleteDeidentifyTemplateRequest): + The request object. Request message for + DeleteDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=organizations/*/deidentifyTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/deidentifyTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/deidentifyTemplates/*}", + }, ] - request, metadata = self._interceptor.pre_delete_deidentify_template(request, metadata) + request, metadata = self._interceptor.pre_delete_deidentify_template( + request, metadata + ) pb_request = dlp.DeleteDeidentifyTemplateRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( 
"{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1784,19 +2040,24 @@ class _DeleteDlpJob(DlpServiceRestStub): def __hash__(self): return hash("DeleteDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): r"""Call the delete dlp job method over HTTP. Args: @@ -1811,41 +2072,44 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/dlpJobs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/dlpJobs/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}", + }, ] request, metadata = self._interceptor.pre_delete_dlp_job(request, metadata) pb_request = dlp.DeleteDlpJobRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1856,19 +2120,24 @@ class _DeleteInspectTemplate(DlpServiceRestStub): def __hash__(self): return hash("DeleteInspectTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteInspectTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): r"""Call the delete inspect template method over HTTP. Args: @@ -1883,49 +2152,54 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=organizations/*/inspectTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=organizations/*/locations/*/inspectTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/inspectTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/inspectTemplates/*}", + }, ] - request, metadata = self._interceptor.pre_delete_inspect_template(request, metadata) + request, metadata = self._interceptor.pre_delete_inspect_template( + request, metadata + ) pb_request = dlp.DeleteInspectTemplateRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), 
timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1936,19 +2210,24 @@ class _DeleteJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("DeleteJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): r"""Call the delete job trigger method over HTTP. Args: @@ -1961,45 +2240,50 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/jobTriggers/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=organizations/*/locations/*/jobTriggers/*}", + }, ] - request, metadata = self._interceptor.pre_delete_job_trigger(request, metadata) + request, metadata = self._interceptor.pre_delete_job_trigger( + request, metadata + ) pb_request = dlp.DeleteJobTriggerRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2010,19 +2294,24 @@ class _DeleteStoredInfoType(DlpServiceRestStub): def __hash__(self): return hash("DeleteStoredInfoType") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteStoredInfoTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): r"""Call the delete stored info type method over HTTP. Args: @@ -2037,49 +2326,54 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=organizations/*/storedInfoTypes/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=organizations/*/locations/*/storedInfoTypes/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/storedInfoTypes/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/storedInfoTypes/*}", + }, ] - request, metadata = self._interceptor.pre_delete_stored_info_type(request, metadata) + request, metadata = self._interceptor.pre_delete_stored_info_type( + request, metadata + ) pb_request = dlp.DeleteStoredInfoTypeRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), 
timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2090,19 +2384,24 @@ class _FinishDlpJob(DlpServiceRestStub): def __hash__(self): return hash("FinishDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.FinishDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.FinishDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): r"""Call the finish dlp job method over HTTP. Args: @@ -2117,11 +2416,12 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:finish', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}:finish", + "body": "*", + }, ] request, metadata = self._interceptor.pre_finish_dlp_job(request, metadata) pb_request = dlp.FinishDlpJobRequest.pb(request) @@ -2130,33 +2430,35 @@ def __call__(self, # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2167,19 +2469,24 @@ class _GetDeidentifyTemplate(DlpServiceRestStub): def __hash__(self): return hash("GetDeidentifyTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyTemplate: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetDeidentifyTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: r"""Call the get deidentify template method over HTTP. 
Args: @@ -2203,49 +2510,54 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=organizations/*/deidentifyTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/deidentifyTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/deidentifyTemplates/*}", + }, ] - request, metadata = self._interceptor.pre_get_deidentify_template(request, metadata) + request, metadata = self._interceptor.pre_get_deidentify_template( + request, metadata + ) pb_request = dlp.GetDeidentifyTemplateRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( 
"{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2264,19 +2576,24 @@ class _GetDlpJob(DlpServiceRestStub): def __hash__(self): return hash("GetDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DlpJob: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: r"""Call the get dlp job method over HTTP. 
Args: @@ -2295,41 +2612,44 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/dlpJobs/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/dlpJobs/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}", + }, ] request, metadata = self._interceptor.pre_get_dlp_job(request, metadata) pb_request = dlp.GetDlpJobRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2348,19 +2668,24 @@ class _GetInspectTemplate(DlpServiceRestStub): def __hash__(self): return hash("GetInspectTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectTemplate: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetInspectTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: r"""Call the get inspect template method over HTTP. 
Args: @@ -2386,49 +2711,54 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=organizations/*/inspectTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/inspectTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/inspectTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/inspectTemplates/*}", + }, ] - request, metadata = self._interceptor.pre_get_inspect_template(request, metadata) + request, metadata = self._interceptor.pre_get_inspect_template( + request, metadata + ) pb_request = dlp.GetInspectTemplateRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, 
uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2447,19 +2777,24 @@ class _GetJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("GetJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.JobTrigger: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: r"""Call the get job trigger method over HTTP. 
Args: @@ -2480,45 +2815,48 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/jobTriggers/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}", + }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/jobTriggers/*}", + }, ] request, metadata = self._interceptor.pre_get_job_trigger(request, metadata) pb_request = dlp.GetJobTriggerRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2537,19 +2875,24 @@ class _GetStoredInfoType(DlpServiceRestStub): def __hash__(self): return hash("GetStoredInfoType") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.StoredInfoType: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetStoredInfoTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: r"""Call the get stored info type method over HTTP. 
Args: @@ -2571,49 +2914,54 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=organizations/*/storedInfoTypes/*}", + }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/storedInfoTypes/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/storedInfoTypes/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/storedInfoTypes/*}", + }, ] - request, metadata = self._interceptor.pre_get_stored_info_type(request, metadata) + request, metadata = self._interceptor.pre_get_stored_info_type( + request, metadata + ) pb_request = dlp.GetStoredInfoTypeRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, 
uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2632,19 +2980,24 @@ class _HybridInspectDlpJob(DlpServiceRestStub): def __hash__(self): return hash("HybridInspectDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.HybridInspectDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.HybridInspectResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.HybridInspectDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: r"""Call the hybrid inspect dlp job method over HTTP. 
Args: @@ -2665,46 +3018,51 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job(request, metadata) + request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job( + request, metadata + ) pb_request = dlp.HybridInspectDlpJobRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2723,80 +3081,90 @@ class _HybridInspectJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("HybridInspectJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.HybridInspectJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.HybridInspectResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.HybridInspectJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: r"""Call the hybrid inspect job - trigger method over HTTP. + trigger method over HTTP. - Args: - request (~.dlp.HybridInspectJobTriggerRequest): - The request object. Request to search for potentially - sensitive info in a custom location. + Args: + request (~.dlp.HybridInspectJobTriggerRequest): + The request object. Request to search for potentially + sensitive info in a custom location. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.dlp.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. 
+ Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger(request, metadata) + request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger( + request, metadata + ) pb_request = dlp.HybridInspectJobTriggerRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception # subclass. @@ -2815,12 +3183,14 @@ class _InspectContent(DlpServiceRestStub): def __hash__(self): return hash("InspectContent") - def __call__(self, - request: dlp.InspectContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectContentResponse: + def __call__( + self, + request: dlp.InspectContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: r"""Call the inspect content method over HTTP. Args: @@ -2839,16 +3209,17 @@ def __call__(self, Results of inspecting an item. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:inspect', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:inspect', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/content:inspect", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/content:inspect", + "body": "*", + }, ] request, metadata = self._interceptor.pre_inspect_content(request, metadata) pb_request = dlp.InspectContentRequest.pb(request) @@ -2857,32 +3228,34 @@ def __call__(self, # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = 
json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2901,19 +3274,24 @@ class _ListDeidentifyTemplates(DlpServiceRestStub): def __hash__(self): return hash("ListDeidentifyTemplates") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListDeidentifyTemplatesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListDeidentifyTemplatesResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListDeidentifyTemplatesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListDeidentifyTemplatesResponse: r"""Call the list deidentify templates method over HTTP. 
Args: @@ -2934,49 +3312,54 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=organizations/*}/deidentifyTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/deidentifyTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*}/deidentifyTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/deidentifyTemplates", + }, ] - request, metadata = self._interceptor.pre_list_deidentify_templates(request, metadata) + request, metadata = self._interceptor.pre_list_deidentify_templates( + request, metadata + ) pb_request = dlp.ListDeidentifyTemplatesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, 
method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2995,19 +3378,24 @@ class _ListDlpJobs(DlpServiceRestStub): def __hash__(self): return hash("ListDlpJobs") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListDlpJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListDlpJobsResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListDlpJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListDlpJobsResponse: r"""Call the list dlp jobs method over HTTP. 
Args: @@ -3028,45 +3416,48 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/dlpJobs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/dlpJobs', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*}/dlpJobs", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/dlpJobs", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/dlpJobs", + }, ] request, metadata = self._interceptor.pre_list_dlp_jobs(request, metadata) pb_request = dlp.ListDlpJobsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3085,12 +3476,14 @@ class _ListInfoTypes(DlpServiceRestStub): def __hash__(self): return hash("ListInfoTypes") - def __call__(self, - request: dlp.ListInfoTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListInfoTypesResponse: + def __call__( + self, + request: dlp.ListInfoTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: r"""Call the list info types method over HTTP. Args: @@ -3109,40 +3502,43 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/infoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=locations/*}/infoTypes', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/infoTypes", + }, + { + "method": "get", + "uri": "/v2/{parent=locations/*}/infoTypes", + }, ] request, metadata = self._interceptor.pre_list_info_types(request, metadata) pb_request = dlp.ListInfoTypesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), 
timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3161,19 +3557,24 @@ class _ListInspectTemplates(DlpServiceRestStub): def __hash__(self): return hash("ListInspectTemplates") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListInspectTemplatesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListInspectTemplatesResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListInspectTemplatesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInspectTemplatesResponse: r"""Call the list inspect templates method over HTTP. 
Args: @@ -3194,49 +3595,54 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/inspectTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/inspectTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=organizations/*}/inspectTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/inspectTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*}/inspectTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/inspectTemplates", + }, ] - request, metadata = self._interceptor.pre_list_inspect_templates(request, metadata) + request, metadata = self._interceptor.pre_list_inspect_templates( + request, metadata + ) pb_request = dlp.ListInspectTemplatesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( 
"{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3255,19 +3661,24 @@ class _ListJobTriggers(DlpServiceRestStub): def __hash__(self): return hash("ListJobTriggers") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListJobTriggersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListJobTriggersResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListJobTriggersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListJobTriggersResponse: r"""Call the list job triggers method over HTTP. Args: @@ -3284,45 +3695,50 @@ def __call__(self, Response message for ListJobTriggers. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/jobTriggers', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*}/jobTriggers", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/jobTriggers", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/jobTriggers", + }, ] - request, metadata = self._interceptor.pre_list_job_triggers(request, metadata) + request, metadata = self._interceptor.pre_list_job_triggers( + request, metadata + ) pb_request = dlp.ListJobTriggersRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3341,19 +3757,24 @@ class _ListStoredInfoTypes(DlpServiceRestStub): def __hash__(self): return hash("ListStoredInfoTypes") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListStoredInfoTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListStoredInfoTypesResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListStoredInfoTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListStoredInfoTypesResponse: r"""Call the list stored info types method over HTTP. 
Args: @@ -3374,49 +3795,54 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/storedInfoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=organizations/*}/storedInfoTypes", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/storedInfoTypes", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*}/storedInfoTypes", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/storedInfoTypes", + }, ] - request, metadata = self._interceptor.pre_list_stored_info_types(request, metadata) + request, metadata = self._interceptor.pre_list_stored_info_types( + request, metadata + ) pb_request = dlp.ListStoredInfoTypesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, 
uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3435,12 +3861,14 @@ class _RedactImage(DlpServiceRestStub): def __hash__(self): return hash("RedactImage") - def __call__(self, - request: dlp.RedactImageRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.RedactImageResponse: + def __call__( + self, + request: dlp.RedactImageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: r"""Call the redact image method over HTTP. Args: @@ -3460,16 +3888,17 @@ def __call__(self, Results of redacting an image. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/image:redact', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/image:redact', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/image:redact", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/image:redact", + "body": "*", + }, ] request, metadata = self._interceptor.pre_redact_image(request, metadata) pb_request = dlp.RedactImageRequest.pb(request) @@ -3478,32 +3907,34 @@ def __call__(self, # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3522,19 +3953,24 @@ class _ReidentifyContent(DlpServiceRestStub): def __hash__(self): return hash("ReidentifyContent") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ReidentifyContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ReidentifyContentResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ReidentifyContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: r"""Call the reidentify content method over HTTP. Args: @@ -3551,51 +3987,56 @@ def __call__(self, Results of re-identifying an item. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:reidentify', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:reidentify', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/content:reidentify", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/content:reidentify", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_reidentify_content(request, metadata) + request, metadata = self._interceptor.pre_reidentify_content( + request, metadata + ) pb_request = dlp.ReidentifyContentRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - 
) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3614,98 +4055,108 @@ class _UpdateDeidentifyTemplate(DlpServiceRestStub): def __hash__(self): return hash("UpdateDeidentifyTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyTemplate: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.UpdateDeidentifyTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: r"""Call the update deidentify - template method over HTTP. - - Args: - request (~.dlp.UpdateDeidentifyTemplateRequest): - The request object. Request message for - UpdateDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. + template method over HTTP. + + Args: + request (~.dlp.UpdateDeidentifyTemplateRequest): + The request object. Request message for + UpdateDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{name=organizations/*/deidentifyTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/deidentifyTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/locations/*/deidentifyTemplates/*}", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_update_deidentify_template(request, metadata) + request, metadata = self._interceptor.pre_update_deidentify_template( + request, metadata + ) pb_request = dlp.UpdateDeidentifyTemplateRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # 
Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3724,19 +4175,24 @@ class _UpdateInspectTemplate(DlpServiceRestStub): def __hash__(self): return hash("UpdateInspectTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectTemplate: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.UpdateInspectTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: r"""Call the update inspect template method over HTTP. 
Args: @@ -3762,61 +4218,66 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{name=organizations/*/inspectTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=organizations/*/locations/*/inspectTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/inspectTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/locations/*/inspectTemplates/*}", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_update_inspect_template(request, metadata) + request, metadata = self._interceptor.pre_update_inspect_template( + request, metadata + ) pb_request = dlp.UpdateInspectTemplateRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + 
use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3835,19 +4296,24 @@ class _UpdateJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("UpdateJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.JobTrigger: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.UpdateJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: r"""Call the update job trigger method over HTTP. 
Args: @@ -3868,56 +4334,61 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{name=projects/*/jobTriggers/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=organizations/*/locations/*/jobTriggers/*}", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_update_job_trigger(request, metadata) + request, metadata = self._interceptor.pre_update_job_trigger( + request, metadata + ) pb_request = dlp.UpdateJobTriggerRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + 
headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3936,19 +4407,24 @@ class _UpdateStoredInfoType(DlpServiceRestStub): def __hash__(self): return hash("UpdateStoredInfoType") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.StoredInfoType: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.UpdateStoredInfoTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: r"""Call the update stored info type method over HTTP. 
Args: @@ -3970,61 +4446,66 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{name=organizations/*/storedInfoTypes/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=organizations/*/locations/*/storedInfoTypes/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/storedInfoTypes/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/locations/*/storedInfoTypes/*}", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_update_stored_info_type(request, metadata) + request, metadata = self._interceptor.pre_update_stored_info_type( + request, metadata + ) pb_request = dlp.UpdateStoredInfoTypeRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], + transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=True + use_integers_for_enums=True, ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + 
use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4040,276 +4521,266 @@ def __call__(self, return resp @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - dlp.DlpJob]: + def activate_job_trigger( + self, + ) -> Callable[[dlp.ActivateJobTriggerRequest], dlp.DlpJob]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - empty_pb2.Empty]: + def cancel_dlp_job(self) -> Callable[[dlp.CancelDlpJobRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore + return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: + def create_deidentify_template( + self, + ) -> Callable[[dlp.CreateDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - dlp.DlpJob]: + def create_dlp_job(self) -> Callable[[dlp.CreateDlpJobRequest], dlp.DlpJob]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore + return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - dlp.InspectTemplate]: + def create_inspect_template( + self, + ) -> Callable[[dlp.CreateInspectTemplateRequest], dlp.InspectTemplate]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - dlp.JobTrigger]: + def create_job_trigger( + self, + ) -> Callable[[dlp.CreateJobTriggerRequest], dlp.JobTrigger]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - dlp.StoredInfoType]: + def create_stored_info_type( + self, + ) -> Callable[[dlp.CreateStoredInfoTypeRequest], dlp.StoredInfoType]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - dlp.DeidentifyContentResponse]: + def deidentify_content( + self, + ) -> Callable[[dlp.DeidentifyContentRequest], dlp.DeidentifyContentResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore + return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - empty_pb2.Empty]: + def delete_deidentify_template( + self, + ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - empty_pb2.Empty]: + def delete_dlp_job(self) -> Callable[[dlp.DeleteDlpJobRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - empty_pb2.Empty]: + def delete_inspect_template( + self, + ) -> Callable[[dlp.DeleteInspectTemplateRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - empty_pb2.Empty]: + def delete_job_trigger( + self, + ) -> Callable[[dlp.DeleteJobTriggerRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - empty_pb2.Empty]: + def delete_stored_info_type( + self, + ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - empty_pb2.Empty]: + def finish_dlp_job(self) -> Callable[[dlp.FinishDlpJobRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore + return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: + def get_deidentify_template( + self, + ) -> Callable[[dlp.GetDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - dlp.DlpJob]: + def get_dlp_job(self) -> Callable[[dlp.GetDlpJobRequest], dlp.DlpJob]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore + return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - dlp.InspectTemplate]: + def get_inspect_template( + self, + ) -> Callable[[dlp.GetInspectTemplateRequest], dlp.InspectTemplate]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - dlp.JobTrigger]: + def get_job_trigger(self) -> Callable[[dlp.GetJobTriggerRequest], dlp.JobTrigger]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - dlp.StoredInfoType]: + def get_stored_info_type( + self, + ) -> Callable[[dlp.GetStoredInfoTypeRequest], dlp.StoredInfoType]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - dlp.HybridInspectResponse]: + def hybrid_inspect_dlp_job( + self, + ) -> Callable[[dlp.HybridInspectDlpJobRequest], dlp.HybridInspectResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore + return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - dlp.HybridInspectResponse]: + def hybrid_inspect_job_trigger( + self, + ) -> Callable[[dlp.HybridInspectJobTriggerRequest], dlp.HybridInspectResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - dlp.InspectContentResponse]: + def inspect_content( + self, + ) -> Callable[[dlp.InspectContentRequest], dlp.InspectContentResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore + return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - dlp.ListDeidentifyTemplatesResponse]: + def list_deidentify_templates( + self, + ) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], dlp.ListDeidentifyTemplatesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore + return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - dlp.ListDlpJobsResponse]: + def list_dlp_jobs( + self, + ) -> Callable[[dlp.ListDlpJobsRequest], dlp.ListDlpJobsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore + return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - dlp.ListInfoTypesResponse]: + def list_info_types( + self, + ) -> Callable[[dlp.ListInfoTypesRequest], dlp.ListInfoTypesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore + return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - dlp.ListInspectTemplatesResponse]: + def list_inspect_templates( + self, + ) -> Callable[[dlp.ListInspectTemplatesRequest], dlp.ListInspectTemplatesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore + return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - dlp.ListJobTriggersResponse]: + def list_job_triggers( + self, + ) -> Callable[[dlp.ListJobTriggersRequest], dlp.ListJobTriggersResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore + return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - dlp.ListStoredInfoTypesResponse]: + def list_stored_info_types( + self, + ) -> Callable[[dlp.ListStoredInfoTypesRequest], dlp.ListStoredInfoTypesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore + return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - dlp.RedactImageResponse]: + def redact_image( + self, + ) -> Callable[[dlp.RedactImageRequest], dlp.RedactImageResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore + return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - dlp.ReidentifyContentResponse]: + def reidentify_content( + self, + ) -> Callable[[dlp.ReidentifyContentRequest], dlp.ReidentifyContentResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore + return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: + def update_deidentify_template( + self, + ) -> Callable[[dlp.UpdateDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - dlp.InspectTemplate]: + def update_inspect_template( + self, + ) -> Callable[[dlp.UpdateInspectTemplateRequest], dlp.InspectTemplate]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - dlp.JobTrigger]: + def update_job_trigger( + self, + ) -> Callable[[dlp.UpdateJobTriggerRequest], dlp.JobTrigger]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - dlp.StoredInfoType]: + def update_stored_info_type( + self, + ) -> Callable[[dlp.UpdateStoredInfoTypeRequest], dlp.StoredInfoType]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore @property def kind(self) -> str: @@ -4319,6 +4790,4 @@ def close(self): self._session.close() -__all__=( - 'DlpServiceRestTransport', -) +__all__ = ("DlpServiceRestTransport",) diff --git a/google/cloud/dlp_v2/types/storage.py b/google/cloud/dlp_v2/types/storage.py index e0517089..15d8c78e 100644 --- a/google/cloud/dlp_v2/types/storage.py +++ b/google/cloud/dlp_v2/types/storage.py @@ -110,11 +110,11 @@ class FileType(proto.Enum): WORD (5): Word files >30 MB will be scanned as binary files. Included file extensions: - docx, dotx, docm, dotm + docx, dotx, docm, dotm PDF (6): PDF files >30 MB will be scanned as binary files. 
Included file extensions: - pdf + pdf AVRO (7): Included file extensions: avro @@ -127,11 +127,11 @@ class FileType(proto.Enum): POWERPOINT (11): Powerpoint files >30 MB will be scanned as binary files. Included file extensions: - pptx, pptm, potx, potm, pot + pptx, pptm, potx, potm, pot EXCEL (12): Excel files >30 MB will be scanned as binary files. Included file extensions: - xlsx, xlsm, xltx, xltm + xlsx, xlsm, xltx, xltm """ FILE_TYPE_UNSPECIFIED = 0 BINARY_FILE = 1 diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc deleted file mode 100644 index 76798ec2..00000000 --- a/owl-bot-staging/v2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/dlp/__init__.py - google/cloud/dlp/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v2/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in deleted file mode 100644 index 148f6bf3..00000000 --- a/owl-bot-staging/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/dlp *.py -recursive-include google/cloud/dlp_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst deleted file mode 100644 index cf97c2e7..00000000 --- a/owl-bot-staging/v2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Dlp API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Dlp API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py deleted file mode 100644 index cf2f570a..00000000 --- a/owl-bot-staging/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-dlp documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-dlp" -copyright = u"2022, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dlp-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-dlp.tex", - u"google-cloud-dlp Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-dlp", - u"Google Cloud Dlp Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-dlp", - u"google-cloud-dlp Documentation", - author, - "google-cloud-dlp", - "GAPIC library for Google Cloud Dlp API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. 
-# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst deleted file mode 100644 index 914da512..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DlpService ----------------------------- - -.. automodule:: google.cloud.dlp_v2.services.dlp_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v2/docs/dlp_v2/services.rst b/owl-bot-staging/v2/docs/dlp_v2/services.rst deleted file mode 100644 index 864a8c83..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Dlp v2 API -==================================== -.. 
toctree:: - :maxdepth: 2 - - dlp_service diff --git a/owl-bot-staging/v2/docs/dlp_v2/types.rst b/owl-bot-staging/v2/docs/dlp_v2/types.rst deleted file mode 100644 index 5470b717..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Dlp v2 API -================================= - -.. automodule:: google.cloud.dlp_v2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst deleted file mode 100644 index d119451a..00000000 --- a/owl-bot-staging/v2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - dlp_v2/services - dlp_v2/types diff --git a/owl-bot-staging/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/v2/google/cloud/dlp/__init__.py deleted file mode 100644 index 3c1a800c..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/__init__.py +++ /dev/null @@ -1,395 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.dlp import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient - -from google.cloud.dlp_v2.types.dlp import Action -from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails -from google.cloud.dlp_v2.types.dlp import BoundingBox -from google.cloud.dlp_v2.types.dlp import BucketingConfig -from google.cloud.dlp_v2.types.dlp import ByteContentItem -from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig -from google.cloud.dlp_v2.types.dlp import CharsToIgnore -from google.cloud.dlp_v2.types.dlp import Color -from google.cloud.dlp_v2.types.dlp import Container -from google.cloud.dlp_v2.types.dlp import ContentItem -from google.cloud.dlp_v2.types.dlp import ContentLocation -from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig -from google.cloud.dlp_v2.types.dlp import CryptoHashConfig -from google.cloud.dlp_v2.types.dlp import CryptoKey -from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig -from google.cloud.dlp_v2.types.dlp import DataProfileAction -from google.cloud.dlp_v2.types.dlp import DataProfileConfigSnapshot -from google.cloud.dlp_v2.types.dlp import DataProfileJobConfig -from google.cloud.dlp_v2.types.dlp import DataProfileLocation -from google.cloud.dlp_v2.types.dlp import DataProfilePubSubCondition -from 
google.cloud.dlp_v2.types.dlp import DataProfilePubSubMessage -from google.cloud.dlp_v2.types.dlp import DataRiskLevel -from google.cloud.dlp_v2.types.dlp import DateShiftConfig -from google.cloud.dlp_v2.types.dlp import DateTime -from google.cloud.dlp_v2.types.dlp import DeidentifyConfig -from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate -from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest -from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import DlpJob -from google.cloud.dlp_v2.types.dlp import DocumentLocation -from google.cloud.dlp_v2.types.dlp import Error -from google.cloud.dlp_v2.types.dlp import ExcludeByHotword -from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes -from google.cloud.dlp_v2.types.dlp import ExclusionRule -from google.cloud.dlp_v2.types.dlp import FieldTransformation -from google.cloud.dlp_v2.types.dlp import Finding -from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest -from google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig -from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest -from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import HybridContentItem -from google.cloud.dlp_v2.types.dlp import HybridFindingDetails -from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest -from google.cloud.dlp_v2.types.dlp import 
HybridInspectJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import HybridInspectResponse -from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics -from google.cloud.dlp_v2.types.dlp import ImageLocation -from google.cloud.dlp_v2.types.dlp import ImageTransformations -from google.cloud.dlp_v2.types.dlp import InfoTypeCategory -from google.cloud.dlp_v2.types.dlp import InfoTypeDescription -from google.cloud.dlp_v2.types.dlp import InfoTypeStats -from google.cloud.dlp_v2.types.dlp import InfoTypeSummary -from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations -from google.cloud.dlp_v2.types.dlp import InspectConfig -from google.cloud.dlp_v2.types.dlp import InspectContentRequest -from google.cloud.dlp_v2.types.dlp import InspectContentResponse -from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails -from google.cloud.dlp_v2.types.dlp import InspectionRule -from google.cloud.dlp_v2.types.dlp import InspectionRuleSet -from google.cloud.dlp_v2.types.dlp import InspectJobConfig -from google.cloud.dlp_v2.types.dlp import InspectResult -from google.cloud.dlp_v2.types.dlp import InspectTemplate -from google.cloud.dlp_v2.types.dlp import JobTrigger -from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse -from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest -from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse -from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse -from google.cloud.dlp_v2.types.dlp import 
ListJobTriggersRequest -from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse -from google.cloud.dlp_v2.types.dlp import Location -from google.cloud.dlp_v2.types.dlp import Manual -from google.cloud.dlp_v2.types.dlp import MetadataLocation -from google.cloud.dlp_v2.types.dlp import OtherInfoTypeSummary -from google.cloud.dlp_v2.types.dlp import OutputStorageConfig -from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation -from google.cloud.dlp_v2.types.dlp import PrivacyMetric -from google.cloud.dlp_v2.types.dlp import ProfileStatus -from google.cloud.dlp_v2.types.dlp import QuasiId -from google.cloud.dlp_v2.types.dlp import QuoteInfo -from google.cloud.dlp_v2.types.dlp import Range -from google.cloud.dlp_v2.types.dlp import RecordCondition -from google.cloud.dlp_v2.types.dlp import RecordLocation -from google.cloud.dlp_v2.types.dlp import RecordSuppression -from google.cloud.dlp_v2.types.dlp import RecordTransformation -from google.cloud.dlp_v2.types.dlp import RecordTransformations -from google.cloud.dlp_v2.types.dlp import RedactConfig -from google.cloud.dlp_v2.types.dlp import RedactImageRequest -from google.cloud.dlp_v2.types.dlp import RedactImageResponse -from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import ReplaceDictionaryConfig -from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig -from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig -from google.cloud.dlp_v2.types.dlp import Schedule -from google.cloud.dlp_v2.types.dlp import StatisticalTable -from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel -from google.cloud.dlp_v2.types.dlp import StoredInfoType -from 
google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion -from google.cloud.dlp_v2.types.dlp import Table -from google.cloud.dlp_v2.types.dlp import TableDataProfile -from google.cloud.dlp_v2.types.dlp import TableLocation -from google.cloud.dlp_v2.types.dlp import TimePartConfig -from google.cloud.dlp_v2.types.dlp import TransformationConfig -from google.cloud.dlp_v2.types.dlp import TransformationDescription -from google.cloud.dlp_v2.types.dlp import TransformationDetails -from google.cloud.dlp_v2.types.dlp import TransformationDetailsStorageConfig -from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling -from google.cloud.dlp_v2.types.dlp import TransformationLocation -from google.cloud.dlp_v2.types.dlp import TransformationOverview -from google.cloud.dlp_v2.types.dlp import TransformationResultStatus -from google.cloud.dlp_v2.types.dlp import TransformationSummary -from google.cloud.dlp_v2.types.dlp import TransientCryptoKey -from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey -from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import Value -from google.cloud.dlp_v2.types.dlp import ValueFrequency -from google.cloud.dlp_v2.types.dlp import VersionDescription -from google.cloud.dlp_v2.types.dlp import ContentOption -from google.cloud.dlp_v2.types.dlp import DlpJobType -from google.cloud.dlp_v2.types.dlp import EncryptionStatus -from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy -from google.cloud.dlp_v2.types.dlp import MatchingType -from google.cloud.dlp_v2.types.dlp import MetadataType -from google.cloud.dlp_v2.types.dlp import 
RelationalOperator -from google.cloud.dlp_v2.types.dlp import ResourceVisibility -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState -from google.cloud.dlp_v2.types.dlp import TransformationContainerType -from google.cloud.dlp_v2.types.dlp import TransformationResultStatusType -from google.cloud.dlp_v2.types.dlp import TransformationType -from google.cloud.dlp_v2.types.storage import BigQueryField -from google.cloud.dlp_v2.types.storage import BigQueryKey -from google.cloud.dlp_v2.types.storage import BigQueryOptions -from google.cloud.dlp_v2.types.storage import BigQueryTable -from google.cloud.dlp_v2.types.storage import CloudStorageFileSet -from google.cloud.dlp_v2.types.storage import CloudStorageOptions -from google.cloud.dlp_v2.types.storage import CloudStoragePath -from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet -from google.cloud.dlp_v2.types.storage import CustomInfoType -from google.cloud.dlp_v2.types.storage import DatastoreKey -from google.cloud.dlp_v2.types.storage import DatastoreOptions -from google.cloud.dlp_v2.types.storage import EntityId -from google.cloud.dlp_v2.types.storage import FieldId -from google.cloud.dlp_v2.types.storage import HybridOptions -from google.cloud.dlp_v2.types.storage import InfoType -from google.cloud.dlp_v2.types.storage import Key -from google.cloud.dlp_v2.types.storage import KindExpression -from google.cloud.dlp_v2.types.storage import PartitionId -from google.cloud.dlp_v2.types.storage import RecordKey -from google.cloud.dlp_v2.types.storage import SensitivityScore -from google.cloud.dlp_v2.types.storage import StorageConfig -from google.cloud.dlp_v2.types.storage import StoredType -from google.cloud.dlp_v2.types.storage import TableOptions -from google.cloud.dlp_v2.types.storage import FileType -from google.cloud.dlp_v2.types.storage import Likelihood - -__all__ = ('DlpServiceClient', - 'DlpServiceAsyncClient', - 'Action', - 'ActivateJobTriggerRequest', - 
'AnalyzeDataSourceRiskDetails', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'Color', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateDeidentifyTemplateRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DataProfileAction', - 'DataProfileConfigSnapshot', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - 'DataRiskLevel', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyTemplate', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDlpJobRequest', - 'DeleteInspectTemplateRequest', - 'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeByHotword', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetDeidentifyTemplateRequest', - 'GetDlpJobRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetStoredInfoTypeRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'ImageTransformations', - 'InfoTypeCategory', - 'InfoTypeDescription', - 'InfoTypeStats', - 'InfoTypeSummary', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListDeidentifyTemplatesRequest', - 
'ListDeidentifyTemplatesResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OtherInfoTypeSummary', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'ProfileStatus', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformation', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'ReplaceDictionaryConfig', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableDataProfile', - 'TableLocation', - 'TimePartConfig', - 'TransformationConfig', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationDetailsStorageConfig', - 'TransformationErrorHandling', - 'TransformationLocation', - 'TransformationOverview', - 'TransformationResultStatus', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateDeidentifyTemplateRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 'ValueFrequency', - 'VersionDescription', - 'ContentOption', - 'DlpJobType', - 'EncryptionStatus', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'RelationalOperator', - 'ResourceVisibility', - 'StoredInfoTypeState', - 'TransformationContainerType', - 'TransformationResultStatusType', - 'TransformationType', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 
'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'SensitivityScore', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp/py.typed b/owl-bot-staging/v2/google/cloud/dlp/py.typed deleted file mode 100644 index 23d89ef3..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dlp package uses inline types. 
diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py deleted file mode 100644 index 8397a3ad..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py +++ /dev/null @@ -1,396 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.dlp_v2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.dlp_service import DlpServiceClient -from .services.dlp_service import DlpServiceAsyncClient - -from .types.dlp import Action -from .types.dlp import ActivateJobTriggerRequest -from .types.dlp import AnalyzeDataSourceRiskDetails -from .types.dlp import BoundingBox -from .types.dlp import BucketingConfig -from .types.dlp import ByteContentItem -from .types.dlp import CancelDlpJobRequest -from .types.dlp import CharacterMaskConfig -from .types.dlp import CharsToIgnore -from .types.dlp import Color -from .types.dlp import Container -from .types.dlp import ContentItem -from .types.dlp import ContentLocation -from .types.dlp import CreateDeidentifyTemplateRequest -from .types.dlp import CreateDlpJobRequest -from .types.dlp import CreateInspectTemplateRequest -from .types.dlp import CreateJobTriggerRequest -from .types.dlp import CreateStoredInfoTypeRequest -from .types.dlp import CryptoDeterministicConfig -from .types.dlp import CryptoHashConfig -from .types.dlp import CryptoKey -from 
.types.dlp import CryptoReplaceFfxFpeConfig -from .types.dlp import DataProfileAction -from .types.dlp import DataProfileConfigSnapshot -from .types.dlp import DataProfileJobConfig -from .types.dlp import DataProfileLocation -from .types.dlp import DataProfilePubSubCondition -from .types.dlp import DataProfilePubSubMessage -from .types.dlp import DataRiskLevel -from .types.dlp import DateShiftConfig -from .types.dlp import DateTime -from .types.dlp import DeidentifyConfig -from .types.dlp import DeidentifyContentRequest -from .types.dlp import DeidentifyContentResponse -from .types.dlp import DeidentifyTemplate -from .types.dlp import DeleteDeidentifyTemplateRequest -from .types.dlp import DeleteDlpJobRequest -from .types.dlp import DeleteInspectTemplateRequest -from .types.dlp import DeleteJobTriggerRequest -from .types.dlp import DeleteStoredInfoTypeRequest -from .types.dlp import DlpJob -from .types.dlp import DocumentLocation -from .types.dlp import Error -from .types.dlp import ExcludeByHotword -from .types.dlp import ExcludeInfoTypes -from .types.dlp import ExclusionRule -from .types.dlp import FieldTransformation -from .types.dlp import Finding -from .types.dlp import FinishDlpJobRequest -from .types.dlp import FixedSizeBucketingConfig -from .types.dlp import GetDeidentifyTemplateRequest -from .types.dlp import GetDlpJobRequest -from .types.dlp import GetInspectTemplateRequest -from .types.dlp import GetJobTriggerRequest -from .types.dlp import GetStoredInfoTypeRequest -from .types.dlp import HybridContentItem -from .types.dlp import HybridFindingDetails -from .types.dlp import HybridInspectDlpJobRequest -from .types.dlp import HybridInspectJobTriggerRequest -from .types.dlp import HybridInspectResponse -from .types.dlp import HybridInspectStatistics -from .types.dlp import ImageLocation -from .types.dlp import ImageTransformations -from .types.dlp import InfoTypeCategory -from .types.dlp import InfoTypeDescription -from .types.dlp import InfoTypeStats -from 
.types.dlp import InfoTypeSummary -from .types.dlp import InfoTypeTransformations -from .types.dlp import InspectConfig -from .types.dlp import InspectContentRequest -from .types.dlp import InspectContentResponse -from .types.dlp import InspectDataSourceDetails -from .types.dlp import InspectionRule -from .types.dlp import InspectionRuleSet -from .types.dlp import InspectJobConfig -from .types.dlp import InspectResult -from .types.dlp import InspectTemplate -from .types.dlp import JobTrigger -from .types.dlp import KmsWrappedCryptoKey -from .types.dlp import LargeCustomDictionaryConfig -from .types.dlp import LargeCustomDictionaryStats -from .types.dlp import ListDeidentifyTemplatesRequest -from .types.dlp import ListDeidentifyTemplatesResponse -from .types.dlp import ListDlpJobsRequest -from .types.dlp import ListDlpJobsResponse -from .types.dlp import ListInfoTypesRequest -from .types.dlp import ListInfoTypesResponse -from .types.dlp import ListInspectTemplatesRequest -from .types.dlp import ListInspectTemplatesResponse -from .types.dlp import ListJobTriggersRequest -from .types.dlp import ListJobTriggersResponse -from .types.dlp import ListStoredInfoTypesRequest -from .types.dlp import ListStoredInfoTypesResponse -from .types.dlp import Location -from .types.dlp import Manual -from .types.dlp import MetadataLocation -from .types.dlp import OtherInfoTypeSummary -from .types.dlp import OutputStorageConfig -from .types.dlp import PrimitiveTransformation -from .types.dlp import PrivacyMetric -from .types.dlp import ProfileStatus -from .types.dlp import QuasiId -from .types.dlp import QuoteInfo -from .types.dlp import Range -from .types.dlp import RecordCondition -from .types.dlp import RecordLocation -from .types.dlp import RecordSuppression -from .types.dlp import RecordTransformation -from .types.dlp import RecordTransformations -from .types.dlp import RedactConfig -from .types.dlp import RedactImageRequest -from .types.dlp import RedactImageResponse -from 
.types.dlp import ReidentifyContentRequest -from .types.dlp import ReidentifyContentResponse -from .types.dlp import ReplaceDictionaryConfig -from .types.dlp import ReplaceValueConfig -from .types.dlp import ReplaceWithInfoTypeConfig -from .types.dlp import RiskAnalysisJobConfig -from .types.dlp import Schedule -from .types.dlp import StatisticalTable -from .types.dlp import StorageMetadataLabel -from .types.dlp import StoredInfoType -from .types.dlp import StoredInfoTypeConfig -from .types.dlp import StoredInfoTypeStats -from .types.dlp import StoredInfoTypeVersion -from .types.dlp import Table -from .types.dlp import TableDataProfile -from .types.dlp import TableLocation -from .types.dlp import TimePartConfig -from .types.dlp import TransformationConfig -from .types.dlp import TransformationDescription -from .types.dlp import TransformationDetails -from .types.dlp import TransformationDetailsStorageConfig -from .types.dlp import TransformationErrorHandling -from .types.dlp import TransformationLocation -from .types.dlp import TransformationOverview -from .types.dlp import TransformationResultStatus -from .types.dlp import TransformationSummary -from .types.dlp import TransientCryptoKey -from .types.dlp import UnwrappedCryptoKey -from .types.dlp import UpdateDeidentifyTemplateRequest -from .types.dlp import UpdateInspectTemplateRequest -from .types.dlp import UpdateJobTriggerRequest -from .types.dlp import UpdateStoredInfoTypeRequest -from .types.dlp import Value -from .types.dlp import ValueFrequency -from .types.dlp import VersionDescription -from .types.dlp import ContentOption -from .types.dlp import DlpJobType -from .types.dlp import EncryptionStatus -from .types.dlp import InfoTypeSupportedBy -from .types.dlp import MatchingType -from .types.dlp import MetadataType -from .types.dlp import RelationalOperator -from .types.dlp import ResourceVisibility -from .types.dlp import StoredInfoTypeState -from .types.dlp import TransformationContainerType -from 
.types.dlp import TransformationResultStatusType -from .types.dlp import TransformationType -from .types.storage import BigQueryField -from .types.storage import BigQueryKey -from .types.storage import BigQueryOptions -from .types.storage import BigQueryTable -from .types.storage import CloudStorageFileSet -from .types.storage import CloudStorageOptions -from .types.storage import CloudStoragePath -from .types.storage import CloudStorageRegexFileSet -from .types.storage import CustomInfoType -from .types.storage import DatastoreKey -from .types.storage import DatastoreOptions -from .types.storage import EntityId -from .types.storage import FieldId -from .types.storage import HybridOptions -from .types.storage import InfoType -from .types.storage import Key -from .types.storage import KindExpression -from .types.storage import PartitionId -from .types.storage import RecordKey -from .types.storage import SensitivityScore -from .types.storage import StorageConfig -from .types.storage import StoredType -from .types.storage import TableOptions -from .types.storage import FileType -from .types.storage import Likelihood - -__all__ = ( - 'DlpServiceAsyncClient', -'Action', -'ActivateJobTriggerRequest', -'AnalyzeDataSourceRiskDetails', -'BigQueryField', -'BigQueryKey', -'BigQueryOptions', -'BigQueryTable', -'BoundingBox', -'BucketingConfig', -'ByteContentItem', -'CancelDlpJobRequest', -'CharacterMaskConfig', -'CharsToIgnore', -'CloudStorageFileSet', -'CloudStorageOptions', -'CloudStoragePath', -'CloudStorageRegexFileSet', -'Color', -'Container', -'ContentItem', -'ContentLocation', -'ContentOption', -'CreateDeidentifyTemplateRequest', -'CreateDlpJobRequest', -'CreateInspectTemplateRequest', -'CreateJobTriggerRequest', -'CreateStoredInfoTypeRequest', -'CryptoDeterministicConfig', -'CryptoHashConfig', -'CryptoKey', -'CryptoReplaceFfxFpeConfig', -'CustomInfoType', -'DataProfileAction', -'DataProfileConfigSnapshot', -'DataProfileJobConfig', -'DataProfileLocation', 
-'DataProfilePubSubCondition', -'DataProfilePubSubMessage', -'DataRiskLevel', -'DatastoreKey', -'DatastoreOptions', -'DateShiftConfig', -'DateTime', -'DeidentifyConfig', -'DeidentifyContentRequest', -'DeidentifyContentResponse', -'DeidentifyTemplate', -'DeleteDeidentifyTemplateRequest', -'DeleteDlpJobRequest', -'DeleteInspectTemplateRequest', -'DeleteJobTriggerRequest', -'DeleteStoredInfoTypeRequest', -'DlpJob', -'DlpJobType', -'DlpServiceClient', -'DocumentLocation', -'EncryptionStatus', -'EntityId', -'Error', -'ExcludeByHotword', -'ExcludeInfoTypes', -'ExclusionRule', -'FieldId', -'FieldTransformation', -'FileType', -'Finding', -'FinishDlpJobRequest', -'FixedSizeBucketingConfig', -'GetDeidentifyTemplateRequest', -'GetDlpJobRequest', -'GetInspectTemplateRequest', -'GetJobTriggerRequest', -'GetStoredInfoTypeRequest', -'HybridContentItem', -'HybridFindingDetails', -'HybridInspectDlpJobRequest', -'HybridInspectJobTriggerRequest', -'HybridInspectResponse', -'HybridInspectStatistics', -'HybridOptions', -'ImageLocation', -'ImageTransformations', -'InfoType', -'InfoTypeCategory', -'InfoTypeDescription', -'InfoTypeStats', -'InfoTypeSummary', -'InfoTypeSupportedBy', -'InfoTypeTransformations', -'InspectConfig', -'InspectContentRequest', -'InspectContentResponse', -'InspectDataSourceDetails', -'InspectJobConfig', -'InspectResult', -'InspectTemplate', -'InspectionRule', -'InspectionRuleSet', -'JobTrigger', -'Key', -'KindExpression', -'KmsWrappedCryptoKey', -'LargeCustomDictionaryConfig', -'LargeCustomDictionaryStats', -'Likelihood', -'ListDeidentifyTemplatesRequest', -'ListDeidentifyTemplatesResponse', -'ListDlpJobsRequest', -'ListDlpJobsResponse', -'ListInfoTypesRequest', -'ListInfoTypesResponse', -'ListInspectTemplatesRequest', -'ListInspectTemplatesResponse', -'ListJobTriggersRequest', -'ListJobTriggersResponse', -'ListStoredInfoTypesRequest', -'ListStoredInfoTypesResponse', -'Location', -'Manual', -'MatchingType', -'MetadataLocation', -'MetadataType', 
-'OtherInfoTypeSummary', -'OutputStorageConfig', -'PartitionId', -'PrimitiveTransformation', -'PrivacyMetric', -'ProfileStatus', -'QuasiId', -'QuoteInfo', -'Range', -'RecordCondition', -'RecordKey', -'RecordLocation', -'RecordSuppression', -'RecordTransformation', -'RecordTransformations', -'RedactConfig', -'RedactImageRequest', -'RedactImageResponse', -'ReidentifyContentRequest', -'ReidentifyContentResponse', -'RelationalOperator', -'ReplaceDictionaryConfig', -'ReplaceValueConfig', -'ReplaceWithInfoTypeConfig', -'ResourceVisibility', -'RiskAnalysisJobConfig', -'Schedule', -'SensitivityScore', -'StatisticalTable', -'StorageConfig', -'StorageMetadataLabel', -'StoredInfoType', -'StoredInfoTypeConfig', -'StoredInfoTypeState', -'StoredInfoTypeStats', -'StoredInfoTypeVersion', -'StoredType', -'Table', -'TableDataProfile', -'TableLocation', -'TableOptions', -'TimePartConfig', -'TransformationConfig', -'TransformationContainerType', -'TransformationDescription', -'TransformationDetails', -'TransformationDetailsStorageConfig', -'TransformationErrorHandling', -'TransformationLocation', -'TransformationOverview', -'TransformationResultStatus', -'TransformationResultStatusType', -'TransformationSummary', -'TransformationType', -'TransientCryptoKey', -'UnwrappedCryptoKey', -'UpdateDeidentifyTemplateRequest', -'UpdateInspectTemplateRequest', -'UpdateJobTriggerRequest', -'UpdateStoredInfoTypeRequest', -'Value', -'ValueFrequency', -'VersionDescription', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json deleted file mode 100644 index 634002d4..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json +++ /dev/null @@ -1,538 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.dlp_v2", - "protoPackage": "google.privacy.dlp.v2", - "schema": "1.0", - "services": { - 
"DlpService": { - "clients": { - "grpc": { - "libraryClient": "DlpServiceClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - 
}, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DlpServiceAsyncClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - 
"GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - }, - "rest": { - "libraryClient": "DlpServiceClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - 
}, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- 
a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed deleted file mode 100644 index 23d89ef3..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py deleted file mode 100644 index e8e1c384..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py deleted file mode 100644 index aa9c062a..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DlpServiceClient -from .async_client import DlpServiceAsyncClient - -__all__ = ( - 'DlpServiceClient', - 'DlpServiceAsyncClient', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py deleted file mode 100644 index ca29f0c5..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ /dev/null @@ -1,4142 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dlp_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.types import dlp -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .client import DlpServiceClient - - -class DlpServiceAsyncClient: - """The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. 
- """ - - _client: DlpServiceClient - - DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT - - deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) - parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) - dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) - parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) - dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) - parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) - finding_path = staticmethod(DlpServiceClient.finding_path) - parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) - inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) - parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) - job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) - parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) - stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) - parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) - common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DlpServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DlpServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DlpServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DlpServiceClient.common_project_path) - parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) - common_location_path = 
staticmethod(DlpServiceClient.common_location_path) - parse_common_location_path = staticmethod(DlpServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DlpServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DlpServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(DlpServiceClient).get_transport_class, type(DlpServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DlpServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dlp service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.DlpServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DlpServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def inspect_content(self, - request: Optional[Union[dlp.InspectContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectContentResponse: - r"""Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. 
- For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = await client.inspect_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.inspect_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def redact_image(self, - request: Optional[Union[dlp.RedactImageRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = await client.redact_image(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]]): - The request object. Request to search for potentially - sensitive info in an image and redact it by covering it - with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.redact_image, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def deidentify_content(self, - request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = await client.deidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]]): - The request object. Request to de-identify a - ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.deidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def reidentify_content(self, - request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.reidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]]): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying an item. - """ - # Create or coerce a protobuf request object. - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_info_types(self, - request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = await client.list_info_types(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]]): - The request object. Request for the list of infoTypes. - parent (:class:`str`): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_inspect_template(self, - request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]]): - The request object. Request message for - CreateInspectTemplate. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - Required. The InspectTemplate to - create. - - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, inspect_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_template is not None: - request.inspect_template = inspect_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_inspect_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_inspect_template(self, - request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]]): - The request object. Request message for - UpdateInspectTemplate. - name (:class:`str`): - Required. Resource name of organization and - inspectTemplate to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - New InspectTemplate value. - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, inspect_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_inspect_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_inspect_template(self, - request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]]): - The request object. Request message for - GetInspectTemplate. - name (:class:`str`): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_inspect_templates(self, - request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInspectTemplatesAsyncPager: - r"""Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]]): - The request object. Request message for - ListInspectTemplates. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: - Response message for - ListInspectTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListInspectTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_inspect_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListInspectTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_inspect_template(self, - request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_inspect_template(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]]): - The request object. Request message for - DeleteInspectTemplate. - name (:class:`str`): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_deidentify_template(self, - request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]]): - The request object. Request message for - CreateDeidentifyTemplate. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - Required. The DeidentifyTemplate to - create. - - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, deidentify_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if deidentify_template is not None: - request.deidentify_template = deidentify_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_deidentify_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_deidentify_template(self, - request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]]): - The request object. Request message for - UpdateDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, deidentify_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_deidentify_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_deidentify_template(self, - request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. 
- See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]]): - The request object. Request message for - GetDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_deidentify_templates(self, - request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDeidentifyTemplatesAsyncPager: - r"""Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]]): - The request object. Request message for - ListDeidentifyTemplates. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: - Response message for - ListDeidentifyTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListDeidentifyTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_deidentify_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListDeidentifyTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_deidentify_template(self, - request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_deidentify_template(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]]): - The request object. Request message for - DeleteDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_job_trigger(self, - request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = await client.create_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]]): - The request object. Request message for - CreateJobTrigger. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): - Required. The JobTrigger to create. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_trigger]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if job_trigger is not None: - request.job_trigger = job_trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_job_trigger(self, - request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.update_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]]): - The request object. Request message for - UpdateJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): - New JobTrigger value. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, job_trigger, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def hybrid_inspect_job_trigger(self, - request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.HybridInspectJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_trigger(self, - request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]]): - The request object. Request message for GetJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_job_triggers(self, - request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTriggersAsyncPager: - r"""Lists job triggers. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]]): - The request object. Request message for ListJobTriggers. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager: - Response message for ListJobTriggers. - Iterating over this object will yield - results and resolve additional pages - automatically. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListJobTriggersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_job_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobTriggersAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job_trigger(self, - request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job trigger. 
- See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_trigger(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]]): - The request object. Request message for - DeleteJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def activate_job_trigger(self, - request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.activate_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]]): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - request = dlp.ActivateJobTriggerRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.activate_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_dlp_job(self, - request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_job: Optional[dlp.InspectJobConfig] = None, - risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]]): - The request object. Request message for - CreateDlpJobRequest. Used to initiate long running jobs - such as calculating risk metrics or inspecting Google - Cloud Storage. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`): - An inspection job scans a storage - repository for InfoTypes. - - This corresponds to the ``inspect_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`): - A risk analysis job calculates - re-identification risk metrics for a - BigQuery table. - - This corresponds to the ``risk_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, inspect_job, risk_job]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_job is not None: - request.inspect_job = inspect_job - if risk_job is not None: - request.risk_job = risk_job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_dlp_jobs(self, - request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDlpJobsAsyncPager: - r"""Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]]): - The request object. The request message for listing DLP - jobs. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: - The response message for listing DLP - jobs. - Iterating over this object will yield - results and resolve additional pages - automatically. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListDlpJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_dlp_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDlpJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_dlp_job(self, - request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]]): - The request object. The request message for - [DlpJobs.GetDlpJob][]. - name (:class:`str`): - Required. The name of the DlpJob - resource. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_dlp_job(self, - request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. 
- See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]]): - The request object. The request message for deleting a - DLP job. - name (:class:`str`): - Required. The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def cancel_dlp_job(self, - request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]]): - The request object. The request message for canceling a - DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = dlp.CancelDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_stored_info_type(self, - request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]]): - The request object. Request message for - CreateStoredInfoType. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): - Required. Configuration of the - storedInfoType to create. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, config]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_stored_info_type, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_stored_info_type(self, - request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.update_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]]): - The request object. Request message for - UpdateStoredInfoType. - name (:class:`str`): - Required. Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, config, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_stored_info_type, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_stored_info_type(self, - request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. 
- See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]]): - The request object. Request message for - GetStoredInfoType. - name (:class:`str`): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_stored_info_types(self, - request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListStoredInfoTypesAsyncPager: - r"""Lists stored infoTypes. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]]): - The request object. Request message for - ListStoredInfoTypes. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager: - Response message for - ListStoredInfoTypes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListStoredInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_stored_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListStoredInfoTypesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_stored_info_type(self, - request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - await client.delete_stored_info_type(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]]): - The request object. Request message for - DeleteStoredInfoType. - name (:class:`str`): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def hybrid_inspect_dlp_job(self, - request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.HybridInspectDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def finish_dlp_job(self, - request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.finish_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]]): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.finish_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DlpServiceAsyncClient", -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py deleted file mode 100644 index 42eacf85..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py +++ /dev/null @@ -1,4267 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dlp_v2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.types import dlp -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DlpServiceGrpcTransport -from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .transports.rest import DlpServiceRestTransport - - -class DlpServiceClientMeta(type): - """Metaclass for the DlpService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] - _transport_registry["grpc"] = DlpServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport - _transport_registry["rest"] = DlpServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DlpServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DlpServiceClient(metaclass=DlpServiceClientMeta): - """The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dlp.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DlpServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def deidentify_template_path(organization: str,deidentify_template: str,) -> str: - """Returns a fully-qualified deidentify_template string.""" - return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - - @staticmethod - def parse_deidentify_template_path(path: str) -> Dict[str,str]: - """Parses a deidentify_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_content_path(project: str,) -> str: - """Returns a fully-qualified dlp_content string.""" - return "projects/{project}/dlpContent".format(project=project, ) - - @staticmethod - def parse_dlp_content_path(path: str) -> Dict[str,str]: - """Parses a dlp_content path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpContent$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_job_path(project: str,dlp_job: str,) -> str: - """Returns a fully-qualified dlp_job string.""" - return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - - @staticmethod - def parse_dlp_job_path(path: str) -> Dict[str,str]: - """Parses a dlp_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def finding_path(project: str,location: str,finding: str,) -> str: - """Returns a fully-qualified finding string.""" - return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - - @staticmethod - def parse_finding_path(path: str) -> Dict[str,str]: - """Parses a finding path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", path) - return m.groupdict() if m else {} - - 
@staticmethod - def inspect_template_path(organization: str,inspect_template: str,) -> str: - """Returns a fully-qualified inspect_template string.""" - return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - - @staticmethod - def parse_inspect_template_path(path: str) -> Dict[str,str]: - """Parses a inspect_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_trigger_path(project: str,job_trigger: str,) -> str: - """Returns a fully-qualified job_trigger string.""" - return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - - @staticmethod - def parse_job_trigger_path(path: str) -> Dict[str,str]: - """Parses a job_trigger path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def stored_info_type_path(organization: str,stored_info_type: str,) -> str: - """Returns a fully-qualified stored_info_type string.""" - return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - - @staticmethod - def parse_stored_info_type_path(path: str) -> Dict[str,str]: - """Parses a stored_info_type path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/storedInfoTypes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = 
re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. 
- client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DlpServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dlp service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, DlpServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, DlpServiceTransport): - # transport is a DlpServiceTransport instance. 
- if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def inspect_content(self, - request: Optional[Union[dlp.InspectContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectContentResponse: - r"""Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = client.inspect_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.InspectContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.InspectContentRequest): - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.inspect_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def redact_image(self, - request: Optional[Union[dlp.RedactImageRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = client.redact_image(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]): - The request object. Request to search for potentially - sensitive info in an image and redact it by covering it - with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.RedactImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.RedactImageRequest): - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.redact_image] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def deidentify_content(self, - request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = client.deidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]): - The request object. Request to de-identify a - ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeidentifyContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeidentifyContentRequest): - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.deidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def reidentify_content(self, - request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = client.reidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying an item. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ReidentifyContentRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ReidentifyContentRequest): - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_info_types(self, - request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = client.list_info_types(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]): - The request object. Request for the list of infoTypes. - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListInfoTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, dlp.ListInfoTypesRequest): - request = dlp.ListInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_info_types] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_inspect_template(self, - request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]): - The request object. Request message for - CreateInspectTemplate. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - Required. The InspectTemplate to - create. - - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, inspect_template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateInspectTemplateRequest): - request = dlp.CreateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_template is not None: - request.inspect_template = inspect_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_inspect_template(self, - request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]): - The request object. Request message for - UpdateInspectTemplate. - name (str): - Required. Resource name of organization and - inspectTemplate to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - New InspectTemplate value. - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, inspect_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateInspectTemplateRequest): - request = dlp.UpdateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_inspect_template(self, - request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]): - The request object. Request message for - GetInspectTemplate. - name (str): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetInspectTemplateRequest): - request = dlp.GetInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_inspect_templates(self, - request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInspectTemplatesPager: - r"""Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]): - The request object. Request message for - ListInspectTemplates. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager: - Response message for - ListInspectTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListInspectTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListInspectTemplatesRequest): - request = dlp.ListInspectTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInspectTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_inspect_template(self, - request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_inspect_template(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]): - The request object. Request message for - DeleteInspectTemplate. - name (str): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteInspectTemplateRequest): - request = dlp.DeleteInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_deidentify_template(self, - request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]): - The request object. Request message for - CreateDeidentifyTemplate. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Required. The DeidentifyTemplate to - create. - - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, deidentify_template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): - request = dlp.CreateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if deidentify_template is not None: - request.deidentify_template = deidentify_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_deidentify_template(self, - request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]): - The request object. Request message for - UpdateDeidentifyTemplate. - name (str): - Required. 
Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, deidentify_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): - request = dlp.UpdateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_deidentify_template(self, - request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]): - The request object. Request message for - GetDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetDeidentifyTemplateRequest): - request = dlp.GetDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_deidentify_templates(self, - request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDeidentifyTemplatesPager: - r"""Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]): - The request object. Request message for - ListDeidentifyTemplates. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager: - Response message for - ListDeidentifyTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListDeidentifyTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): - request = dlp.ListDeidentifyTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDeidentifyTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_deidentify_template(self, - request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_deidentify_template(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]): - The request object. Request message for - DeleteDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): - request = dlp.DeleteDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_job_trigger(self, - request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = client.create_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]): - The request object. Request message for - CreateJobTrigger. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_trigger]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateJobTriggerRequest): - request = dlp.CreateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_trigger is not None: - request.job_trigger = job_trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_job_trigger(self, - request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.update_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]): - The request object. Request message for - UpdateJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, job_trigger, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateJobTriggerRequest): - request = dlp.UpdateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def hybrid_inspect_job_trigger(self, - request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.HybridInspectJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.HybridInspectJobTriggerRequest): - request = dlp.HybridInspectJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_job_trigger(self, - request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]): - The request object. Request message for GetJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. 
See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetJobTriggerRequest): - request = dlp.GetJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_job_triggers(self, - request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTriggersPager: - r"""Lists job triggers. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]): - The request object. Request message for ListJobTriggers. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: - Response message for ListJobTriggers. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListJobTriggersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListJobTriggersRequest): - request = dlp.ListJobTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobTriggersPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_job_trigger(self, - request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - client.delete_job_trigger(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]): - The request object. Request message for - DeleteJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteJobTriggerRequest): - request = dlp.DeleteJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def activate_job_trigger(self, - request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.activate_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ActivateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ActivateJobTriggerRequest): - request = dlp.ActivateJobTriggerRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def create_dlp_job(self, - request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_job: Optional[dlp.InspectJobConfig] = None, - risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]): - The request object. Request message for - CreateDlpJobRequest. Used to initiate long running jobs - such as calculating risk metrics or inspecting Google - Cloud Storage. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - An inspection job scans a storage - repository for InfoTypes. - - This corresponds to the ``inspect_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - A risk analysis job calculates - re-identification risk metrics for a - BigQuery table. - - This corresponds to the ``risk_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, inspect_job, risk_job]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateDlpJobRequest): - request = dlp.CreateDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_job is not None: - request.inspect_job = inspect_job - if risk_job is not None: - request.risk_job = risk_job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_dlp_jobs(self, - request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDlpJobsPager: - r"""Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]): - The request object. The request message for listing DLP - jobs. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: - The response message for listing DLP - jobs. 
- Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListDlpJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListDlpJobsRequest): - request = dlp.ListDlpJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDlpJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_dlp_job(self, - request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]): - The request object. The request message for - [DlpJobs.GetDlpJob][]. - name (str): - Required. The name of the DlpJob - resource. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetDlpJobRequest): - request = dlp.GetDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_dlp_job(self, - request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - client.delete_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]): - The request object. The request message for deleting a - DLP job. - name (str): - Required. The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, dlp.DeleteDlpJobRequest): - request = dlp.DeleteDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_dlp_job(self, - request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]): - The request object. The request message for canceling a - DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CancelDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CancelDlpJobRequest): - request = dlp.CancelDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_stored_info_type(self, - request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]): - The request object. Request message for - CreateStoredInfoType. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the - storedInfoType to create. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, config]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateStoredInfoTypeRequest): - request = dlp.CreateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_stored_info_type(self, - request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.update_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]): - The request object. Request message for - UpdateStoredInfoType. - name (str): - Required. Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, config, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): - request = dlp.UpdateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_stored_info_type(self, - request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]): - The request object. Request message for - GetStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetStoredInfoTypeRequest): - request = dlp.GetStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_stored_info_types(self, - request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListStoredInfoTypesPager: - r"""Lists stored infoTypes. 
- See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]): - The request object. Request message for - ListStoredInfoTypes. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager: - Response message for - ListStoredInfoTypes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListStoredInfoTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListStoredInfoTypesRequest): - request = dlp.ListStoredInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListStoredInfoTypesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_stored_info_type(self, - request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - client.delete_stored_info_type(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]): - The request object. Request message for - DeleteStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): - request = dlp.DeleteStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def hybrid_inspect_dlp_job(self, - request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.HybridInspectDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.HybridInspectDlpJobRequest): - request = dlp.HybridInspectDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def finish_dlp_job(self, - request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - client.finish_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.FinishDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.FinishDlpJobRequest): - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "DlpServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! 
Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DlpServiceClient", -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py deleted file mode 100644 index 73a0e48f..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py +++ /dev/null @@ -1,623 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dlp_v2.types import dlp - - -class ListInspectTemplatesPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``inspect_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListInspectTemplatesResponse], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.InspectTemplate]: - for page in self.pages: - yield from page.inspect_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInspectTemplatesAsyncPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``inspect_templates`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.InspectTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.inspect_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``deidentify_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDeidentifyTemplatesResponse], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.DeidentifyTemplate]: - for page in self.pages: - yield from page.deidentify_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesAsyncPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``deidentify_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.DeidentifyTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.deidentify_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersPager: - """A pager for iterating through ``list_job_triggers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_triggers`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListJobTriggersResponse], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.JobTrigger]: - for page in self.pages: - yield from page.job_triggers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersAsyncPager: - """A pager for iterating through ``list_job_triggers`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_triggers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.JobTrigger]: - async def async_generator(): - async for page in self.pages: - for response in page.job_triggers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDlpJobsResponse], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. 
- response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.DlpJob]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsAsyncPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.DlpJob]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``stored_info_types`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.ListStoredInfoTypesResponse], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.StoredInfoType]: - for page in self.pages: - yield from page.stored_info_types - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesAsyncPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``stored_info_types`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.StoredInfoType]: - async def async_generator(): - async for page in self.pages: - for response in page.stored_info_types: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py deleted file mode 100644 index df9b4279..00000000 --- 
a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DlpServiceTransport -from .grpc import DlpServiceGrpcTransport -from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .rest import DlpServiceRestTransport -from .rest import DlpServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] -_transport_registry['grpc'] = DlpServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport -_transport_registry['rest'] = DlpServiceRestTransport - -__all__ = ( - 'DlpServiceTransport', - 'DlpServiceGrpcTransport', - 'DlpServiceGrpcAsyncIOTransport', - 'DlpServiceRestTransport', - 'DlpServiceRestInterceptor', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py deleted file mode 100644 index 290f4cdf..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py +++ /dev/null @@ -1,751 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dlp_v2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DlpServiceTransport(abc.ABC): - """Abstract transport class for DlpService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dlp.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.inspect_content: gapic_v1.method.wrap_method( - self.inspect_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.redact_image: gapic_v1.method.wrap_method( - self.redact_image, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.deidentify_content: gapic_v1.method.wrap_method( - self.deidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.reidentify_content: gapic_v1.method.wrap_method( - self.reidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_info_types: gapic_v1.method.wrap_method( - self.list_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_inspect_template: gapic_v1.method.wrap_method( - self.create_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_inspect_template: gapic_v1.method.wrap_method( - 
self.update_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_inspect_template: gapic_v1.method.wrap_method( - self.get_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_inspect_templates: gapic_v1.method.wrap_method( - self.list_inspect_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_inspect_template: gapic_v1.method.wrap_method( - self.delete_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_deidentify_template: gapic_v1.method.wrap_method( - self.create_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_deidentify_template: gapic_v1.method.wrap_method( - self.update_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_deidentify_template: gapic_v1.method.wrap_method( - self.get_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_deidentify_templates: gapic_v1.method.wrap_method( - self.list_deidentify_templates, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_deidentify_template: gapic_v1.method.wrap_method( - self.delete_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_job_trigger: gapic_v1.method.wrap_method( - self.create_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.update_job_trigger: gapic_v1.method.wrap_method( - self.update_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( - self.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.get_job_trigger: gapic_v1.method.wrap_method( - self.get_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_job_triggers: gapic_v1.method.wrap_method( - self.list_job_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_job_trigger: gapic_v1.method.wrap_method( - self.delete_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - 
core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.activate_job_trigger: gapic_v1.method.wrap_method( - self.activate_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.create_dlp_job: gapic_v1.method.wrap_method( - self.create_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.list_dlp_jobs: gapic_v1.method.wrap_method( - self.list_dlp_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_dlp_job: gapic_v1.method.wrap_method( - self.get_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_dlp_job: gapic_v1.method.wrap_method( - self.delete_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.cancel_dlp_job: gapic_v1.method.wrap_method( - self.cancel_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.create_stored_info_type: gapic_v1.method.wrap_method( - self.create_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.update_stored_info_type: gapic_v1.method.wrap_method( - self.update_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.get_stored_info_type: gapic_v1.method.wrap_method( - self.get_stored_info_type, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_stored_info_types: gapic_v1.method.wrap_method( - self.list_stored_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_stored_info_type: gapic_v1.method.wrap_method( - self.delete_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( - self.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.finish_dlp_job: gapic_v1.method.wrap_method( - self.finish_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Union[ - dlp.InspectContentResponse, - Awaitable[dlp.InspectContentResponse] - ]]: - raise NotImplementedError() - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - Union[ - dlp.RedactImageResponse, - Awaitable[dlp.RedactImageResponse] - ]]: - raise NotImplementedError() - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - Union[ - dlp.DeidentifyContentResponse, - Awaitable[dlp.DeidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Union[ - dlp.ReidentifyContentResponse, - Awaitable[dlp.ReidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Union[ - dlp.ListInfoTypesResponse, - Awaitable[dlp.ListInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Union[ - dlp.ListInspectTemplatesResponse, - Awaitable[dlp.ListInspectTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Union[ - 
empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Union[ - dlp.ListDeidentifyTemplatesResponse, - Awaitable[dlp.ListDeidentifyTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def list_job_triggers(self) -> Callable[ - 
[dlp.ListJobTriggersRequest], - Union[ - dlp.ListJobTriggersResponse, - Awaitable[dlp.ListJobTriggersResponse] - ]]: - raise NotImplementedError() - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - Union[ - dlp.ListDlpJobsResponse, - Awaitable[dlp.ListDlpJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def 
list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Union[ - dlp.ListStoredInfoTypesResponse, - Awaitable[dlp.ListStoredInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DlpServiceTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py deleted file mode 100644 index 81be6a63..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py +++ /dev/null @@ -1,1261 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO - - -class DlpServiceGrpcTransport(DlpServiceTransport): - """gRPC backend transport for DlpService. - - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - dlp.InspectContentResponse]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. 
- For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - Returns: - Callable[[~.InspectContentRequest], - ~.InspectContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'inspect_content' not in self._stubs: - self._stubs['inspect_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/InspectContent', - request_serializer=dlp.InspectContentRequest.serialize, - response_deserializer=dlp.InspectContentResponse.deserialize, - ) - return self._stubs['inspect_content'] - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - dlp.RedactImageResponse]: - r"""Return a callable for the redact image method over gRPC. - - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.RedactImageRequest], - ~.RedactImageResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'redact_image' not in self._stubs: - self._stubs['redact_image'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/RedactImage', - request_serializer=dlp.RedactImageRequest.serialize, - response_deserializer=dlp.RedactImageResponse.deserialize, - ) - return self._stubs['redact_image'] - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - dlp.DeidentifyContentResponse]: - r"""Return a callable for the deidentify content method over gRPC. - - De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.DeidentifyContentRequest], - ~.DeidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - dlp.ReidentifyContentResponse]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. 
See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - ~.ReidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - dlp.ListInfoTypesResponse]: - r"""Return a callable for the list info types method over gRPC. - - Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - ~.ListInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.UpdateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - dlp.ListInspectTemplatesResponse]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.ListInspectTemplatesRequest], - ~.ListInspectTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.DeleteInspectTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. 
- - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - dlp.ListDeidentifyTemplatesResponse]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - ~.ListDeidentifyTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. 
- - Returns: - Callable[[~.CreateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.UpdateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. 
To review the findings monitor the jobs - within the trigger. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - dlp.ListJobTriggersResponse]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. 
- See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.ListJobTriggersRequest], - ~.ListJobTriggersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - dlp.DlpJob]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. 
Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - Returns: - Callable[[~.ActivateJobTriggerRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'activate_job_trigger' not in self._stubs: - self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', - request_serializer=dlp.ActivateJobTriggerRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['activate_job_trigger'] - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - dlp.DlpJob]: - r"""Return a callable for the create dlp job method over gRPC. - - Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.CreateDlpJobRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_dlp_job' not in self._stubs: - self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDlpJob', - request_serializer=dlp.CreateDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['create_dlp_job'] - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - dlp.ListDlpJobsResponse]: - r"""Return a callable for the list dlp jobs method over gRPC. - - Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.ListDlpJobsRequest], - ~.ListDlpJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_dlp_jobs' not in self._stubs: - self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDlpJobs', - request_serializer=dlp.ListDlpJobsRequest.serialize, - response_deserializer=dlp.ListDlpJobsResponse.deserialize, - ) - return self._stubs['list_dlp_jobs'] - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - dlp.DlpJob]: - r"""Return a callable for the get dlp job method over gRPC. - - Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.GetDlpJobRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete dlp job method over gRPC. - - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. 
- - Returns: - Callable[[~.CancelDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the update stored info type method over gRPC. - - Updates the stored infoType by creating a new - version. 
The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - dlp.ListStoredInfoTypesResponse]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.ListStoredInfoTypesRequest], - ~.ListStoredInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. - - Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - Returns: - Callable[[~.FinishDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DlpServiceGrpcTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py deleted file mode 100644 index be0411f7..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1260 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DlpServiceGrpcTransport - - -class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): - """gRPC AsyncIO backend transport for DlpService. - - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. 
- credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. 
If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Awaitable[dlp.InspectContentResponse]]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. 
- This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - Returns: - Callable[[~.InspectContentRequest], - Awaitable[~.InspectContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'inspect_content' not in self._stubs: - self._stubs['inspect_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/InspectContent', - request_serializer=dlp.InspectContentRequest.serialize, - response_deserializer=dlp.InspectContentResponse.deserialize, - ) - return self._stubs['inspect_content'] - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - Awaitable[dlp.RedactImageResponse]]: - r"""Return a callable for the redact image method over gRPC. - - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.RedactImageRequest], - Awaitable[~.RedactImageResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'redact_image' not in self._stubs: - self._stubs['redact_image'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/RedactImage', - request_serializer=dlp.RedactImageRequest.serialize, - response_deserializer=dlp.RedactImageResponse.deserialize, - ) - return self._stubs['redact_image'] - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - Awaitable[dlp.DeidentifyContentResponse]]: - r"""Return a callable for the deidentify content method over gRPC. - - De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.DeidentifyContentRequest], - Awaitable[~.DeidentifyContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Awaitable[dlp.ReidentifyContentResponse]]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - Awaitable[~.ReidentifyContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Awaitable[dlp.ListInfoTypesResponse]]: - r"""Return a callable for the list info types method over gRPC. - - Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - Awaitable[~.ListInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. 
- - Returns: - Callable[[~.UpdateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Awaitable[dlp.ListInspectTemplatesResponse]]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. 
- See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.ListInspectTemplatesRequest], - Awaitable[~.ListInspectTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.DeleteInspectTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. 
- - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Awaitable[dlp.ListDeidentifyTemplatesResponse]]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - Awaitable[~.ListDeidentifyTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.CreateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. 
- - Returns: - Callable[[~.UpdateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - Awaitable[dlp.ListJobTriggersResponse]]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.ListJobTriggersRequest], - Awaitable[~.ListJobTriggersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - Returns: - Callable[[~.ActivateJobTriggerRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'activate_job_trigger' not in self._stubs: - self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', - request_serializer=dlp.ActivateJobTriggerRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['activate_job_trigger'] - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the create dlp job method over gRPC. - - Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.CreateDlpJobRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_dlp_job' not in self._stubs: - self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDlpJob', - request_serializer=dlp.CreateDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['create_dlp_job'] - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - Awaitable[dlp.ListDlpJobsResponse]]: - r"""Return a callable for the list dlp jobs method over gRPC. - - Lists DlpJobs that match the specified filter in the - request. 
See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.ListDlpJobsRequest], - Awaitable[~.ListDlpJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_dlp_jobs' not in self._stubs: - self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDlpJobs', - request_serializer=dlp.ListDlpJobsRequest.serialize, - response_deserializer=dlp.ListDlpJobsResponse.deserialize, - ) - return self._stubs['list_dlp_jobs'] - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the get dlp job method over gRPC. - - Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.GetDlpJobRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete dlp job method over gRPC. 
- - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.CancelDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the update stored info type method over gRPC. - - Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Awaitable[dlp.ListStoredInfoTypesResponse]]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. 
- - Returns: - Callable[[~.ListStoredInfoTypesRequest], - Awaitable[~.ListStoredInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. 
- - Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - Returns: - Callable[[~.FinishDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'DlpServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py deleted file mode 100644 index 5bc3d949..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py +++ /dev/null @@ -1,390 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .dlp import ( - Action, - ActivateJobTriggerRequest, - AnalyzeDataSourceRiskDetails, - BoundingBox, - BucketingConfig, - ByteContentItem, - CancelDlpJobRequest, - CharacterMaskConfig, - CharsToIgnore, - Color, - Container, - ContentItem, - ContentLocation, - CreateDeidentifyTemplateRequest, - CreateDlpJobRequest, - CreateInspectTemplateRequest, - CreateJobTriggerRequest, - CreateStoredInfoTypeRequest, - CryptoDeterministicConfig, - CryptoHashConfig, - CryptoKey, - CryptoReplaceFfxFpeConfig, - DataProfileAction, - DataProfileConfigSnapshot, - DataProfileJobConfig, - DataProfileLocation, - DataProfilePubSubCondition, - DataProfilePubSubMessage, - DataRiskLevel, - DateShiftConfig, - DateTime, - DeidentifyConfig, - DeidentifyContentRequest, - DeidentifyContentResponse, - DeidentifyTemplate, - DeleteDeidentifyTemplateRequest, - DeleteDlpJobRequest, - DeleteInspectTemplateRequest, - DeleteJobTriggerRequest, - DeleteStoredInfoTypeRequest, - DlpJob, - DocumentLocation, - Error, - ExcludeByHotword, - ExcludeInfoTypes, - ExclusionRule, - FieldTransformation, - Finding, - FinishDlpJobRequest, - FixedSizeBucketingConfig, - GetDeidentifyTemplateRequest, - GetDlpJobRequest, - GetInspectTemplateRequest, - GetJobTriggerRequest, - GetStoredInfoTypeRequest, - HybridContentItem, - HybridFindingDetails, - HybridInspectDlpJobRequest, - HybridInspectJobTriggerRequest, - HybridInspectResponse, - HybridInspectStatistics, - ImageLocation, - ImageTransformations, - InfoTypeCategory, - InfoTypeDescription, - InfoTypeStats, - InfoTypeSummary, - InfoTypeTransformations, - InspectConfig, - InspectContentRequest, - InspectContentResponse, - InspectDataSourceDetails, - InspectionRule, - InspectionRuleSet, - InspectJobConfig, - InspectResult, - InspectTemplate, - JobTrigger, - KmsWrappedCryptoKey, - LargeCustomDictionaryConfig, - LargeCustomDictionaryStats, - ListDeidentifyTemplatesRequest, - ListDeidentifyTemplatesResponse, - ListDlpJobsRequest, - ListDlpJobsResponse, - 
ListInfoTypesRequest, - ListInfoTypesResponse, - ListInspectTemplatesRequest, - ListInspectTemplatesResponse, - ListJobTriggersRequest, - ListJobTriggersResponse, - ListStoredInfoTypesRequest, - ListStoredInfoTypesResponse, - Location, - Manual, - MetadataLocation, - OtherInfoTypeSummary, - OutputStorageConfig, - PrimitiveTransformation, - PrivacyMetric, - ProfileStatus, - QuasiId, - QuoteInfo, - Range, - RecordCondition, - RecordLocation, - RecordSuppression, - RecordTransformation, - RecordTransformations, - RedactConfig, - RedactImageRequest, - RedactImageResponse, - ReidentifyContentRequest, - ReidentifyContentResponse, - ReplaceDictionaryConfig, - ReplaceValueConfig, - ReplaceWithInfoTypeConfig, - RiskAnalysisJobConfig, - Schedule, - StatisticalTable, - StorageMetadataLabel, - StoredInfoType, - StoredInfoTypeConfig, - StoredInfoTypeStats, - StoredInfoTypeVersion, - Table, - TableDataProfile, - TableLocation, - TimePartConfig, - TransformationConfig, - TransformationDescription, - TransformationDetails, - TransformationDetailsStorageConfig, - TransformationErrorHandling, - TransformationLocation, - TransformationOverview, - TransformationResultStatus, - TransformationSummary, - TransientCryptoKey, - UnwrappedCryptoKey, - UpdateDeidentifyTemplateRequest, - UpdateInspectTemplateRequest, - UpdateJobTriggerRequest, - UpdateStoredInfoTypeRequest, - Value, - ValueFrequency, - VersionDescription, - ContentOption, - DlpJobType, - EncryptionStatus, - InfoTypeSupportedBy, - MatchingType, - MetadataType, - RelationalOperator, - ResourceVisibility, - StoredInfoTypeState, - TransformationContainerType, - TransformationResultStatusType, - TransformationType, -) -from .storage import ( - BigQueryField, - BigQueryKey, - BigQueryOptions, - BigQueryTable, - CloudStorageFileSet, - CloudStorageOptions, - CloudStoragePath, - CloudStorageRegexFileSet, - CustomInfoType, - DatastoreKey, - DatastoreOptions, - EntityId, - FieldId, - HybridOptions, - InfoType, - Key, - KindExpression, - 
PartitionId, - RecordKey, - SensitivityScore, - StorageConfig, - StoredType, - TableOptions, - FileType, - Likelihood, -) - -__all__ = ( - 'Action', - 'ActivateJobTriggerRequest', - 'AnalyzeDataSourceRiskDetails', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'Color', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateDeidentifyTemplateRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DataProfileAction', - 'DataProfileConfigSnapshot', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - 'DataRiskLevel', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyTemplate', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDlpJobRequest', - 'DeleteInspectTemplateRequest', - 'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeByHotword', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetDeidentifyTemplateRequest', - 'GetDlpJobRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetStoredInfoTypeRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'ImageTransformations', - 'InfoTypeCategory', - 'InfoTypeDescription', - 'InfoTypeStats', - 'InfoTypeSummary', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 
'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OtherInfoTypeSummary', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'ProfileStatus', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformation', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'ReplaceDictionaryConfig', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableDataProfile', - 'TableLocation', - 'TimePartConfig', - 'TransformationConfig', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationDetailsStorageConfig', - 'TransformationErrorHandling', - 'TransformationLocation', - 'TransformationOverview', - 'TransformationResultStatus', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateDeidentifyTemplateRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 'ValueFrequency', - 'VersionDescription', - 'ContentOption', - 'DlpJobType', - 'EncryptionStatus', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'RelationalOperator', - 'ResourceVisibility', - 'StoredInfoTypeState', 
- 'TransformationContainerType', - 'TransformationResultStatusType', - 'TransformationType', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'SensitivityScore', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py deleted file mode 100644 index d82444a2..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py +++ /dev/null @@ -1,8846 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dlp_v2.types import storage -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'TransformationResultStatusType', - 'TransformationContainerType', - 'TransformationType', - 'RelationalOperator', - 'MatchingType', - 'ContentOption', - 'MetadataType', - 'InfoTypeSupportedBy', - 'DlpJobType', - 'StoredInfoTypeState', - 'ResourceVisibility', - 'EncryptionStatus', - 'ExcludeInfoTypes', - 'ExcludeByHotword', - 'ExclusionRule', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectConfig', - 'ByteContentItem', - 'ContentItem', - 'Table', - 'InspectResult', - 'Finding', - 'Location', - 'ContentLocation', - 'MetadataLocation', - 'StorageMetadataLabel', - 'DocumentLocation', - 'RecordLocation', - 'TableLocation', - 'Container', - 'Range', - 'ImageLocation', - 'BoundingBox', - 'RedactImageRequest', - 'Color', - 'RedactImageResponse', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'InspectContentRequest', - 'InspectContentResponse', - 'OutputStorageConfig', - 'InfoTypeStats', - 'InspectDataSourceDetails', - 'HybridInspectStatistics', - 'InfoTypeDescription', - 'InfoTypeCategory', - 'VersionDescription', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'RiskAnalysisJobConfig', - 'QuasiId', - 'StatisticalTable', - 'PrivacyMetric', - 'AnalyzeDataSourceRiskDetails', - 'ValueFrequency', - 'Value', - 'QuoteInfo', - 'DateTime', - 
'DeidentifyConfig', - 'ImageTransformations', - 'TransformationErrorHandling', - 'PrimitiveTransformation', - 'TimePartConfig', - 'CryptoHashConfig', - 'CryptoDeterministicConfig', - 'ReplaceValueConfig', - 'ReplaceDictionaryConfig', - 'ReplaceWithInfoTypeConfig', - 'RedactConfig', - 'CharsToIgnore', - 'CharacterMaskConfig', - 'FixedSizeBucketingConfig', - 'BucketingConfig', - 'CryptoReplaceFfxFpeConfig', - 'CryptoKey', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'KmsWrappedCryptoKey', - 'DateShiftConfig', - 'InfoTypeTransformations', - 'FieldTransformation', - 'RecordTransformations', - 'RecordSuppression', - 'RecordCondition', - 'TransformationOverview', - 'TransformationSummary', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationLocation', - 'RecordTransformation', - 'TransformationResultStatus', - 'TransformationDetailsStorageConfig', - 'Schedule', - 'Manual', - 'InspectTemplate', - 'DeidentifyTemplate', - 'Error', - 'JobTrigger', - 'Action', - 'TransformationConfig', - 'CreateInspectTemplateRequest', - 'UpdateInspectTemplateRequest', - 'GetInspectTemplateRequest', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'DeleteInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'ActivateJobTriggerRequest', - 'UpdateJobTriggerRequest', - 'GetJobTriggerRequest', - 'CreateDlpJobRequest', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'DeleteJobTriggerRequest', - 'InspectJobConfig', - 'DataProfileAction', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DlpJob', - 'GetDlpJobRequest', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'CancelDlpJobRequest', - 'FinishDlpJobRequest', - 'DeleteDlpJobRequest', - 'CreateDeidentifyTemplateRequest', - 'UpdateDeidentifyTemplateRequest', - 'GetDeidentifyTemplateRequest', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'DeleteDeidentifyTemplateRequest', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 
'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'StoredInfoType', - 'CreateStoredInfoTypeRequest', - 'UpdateStoredInfoTypeRequest', - 'GetStoredInfoTypeRequest', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'DeleteStoredInfoTypeRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectDlpJobRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectResponse', - 'DataRiskLevel', - 'DataProfileConfigSnapshot', - 'TableDataProfile', - 'ProfileStatus', - 'InfoTypeSummary', - 'OtherInfoTypeSummary', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - }, -) - - -class TransformationResultStatusType(proto.Enum): - r"""Enum of possible outcomes of transformations. SUCCESS if - transformation and storing of transformation was successful, - otherwise, reason for not transforming. - - Values: - STATE_TYPE_UNSPECIFIED (0): - No description available. - INVALID_TRANSFORM (1): - This will be set when a finding could not be - transformed (i.e. outside user set bucket - range). - BIGQUERY_MAX_ROW_SIZE_EXCEEDED (2): - This will be set when a BigQuery - transformation was successful but could not be - stored back in BigQuery because the transformed - row exceeds BigQuery's max row size. - METADATA_UNRETRIEVABLE (3): - This will be set when there is a finding in - the custom metadata of a file, but at the write - time of the transformed file, this key / value - pair is unretrievable. - SUCCESS (4): - This will be set when the transformation and - storing of it is successful. - """ - STATE_TYPE_UNSPECIFIED = 0 - INVALID_TRANSFORM = 1 - BIGQUERY_MAX_ROW_SIZE_EXCEEDED = 2 - METADATA_UNRETRIEVABLE = 3 - SUCCESS = 4 - - -class TransformationContainerType(proto.Enum): - r"""Describes functionality of a given container in its original - format. - - Values: - TRANSFORM_UNKNOWN_CONTAINER (0): - No description available. - TRANSFORM_BODY (1): - No description available. 
- TRANSFORM_METADATA (2): - No description available. - TRANSFORM_TABLE (3): - No description available. - """ - TRANSFORM_UNKNOWN_CONTAINER = 0 - TRANSFORM_BODY = 1 - TRANSFORM_METADATA = 2 - TRANSFORM_TABLE = 3 - - -class TransformationType(proto.Enum): - r"""An enum of rules that can be used to transform a value. Can be a - record suppression, or one of the transformation rules specified - under ``PrimitiveTransformation``. - - Values: - TRANSFORMATION_TYPE_UNSPECIFIED (0): - Unused - RECORD_SUPPRESSION (1): - Record suppression - REPLACE_VALUE (2): - Replace value - REPLACE_DICTIONARY (15): - Replace value using a dictionary. - REDACT (3): - Redact - CHARACTER_MASK (4): - Character mask - CRYPTO_REPLACE_FFX_FPE (5): - FFX-FPE - FIXED_SIZE_BUCKETING (6): - Fixed size bucketing - BUCKETING (7): - Bucketing - REPLACE_WITH_INFO_TYPE (8): - Replace with info type - TIME_PART (9): - Time part - CRYPTO_HASH (10): - Crypto hash - DATE_SHIFT (12): - Date shift - CRYPTO_DETERMINISTIC_CONFIG (13): - Deterministic crypto - REDACT_IMAGE (14): - Redact image - """ - TRANSFORMATION_TYPE_UNSPECIFIED = 0 - RECORD_SUPPRESSION = 1 - REPLACE_VALUE = 2 - REPLACE_DICTIONARY = 15 - REDACT = 3 - CHARACTER_MASK = 4 - CRYPTO_REPLACE_FFX_FPE = 5 - FIXED_SIZE_BUCKETING = 6 - BUCKETING = 7 - REPLACE_WITH_INFO_TYPE = 8 - TIME_PART = 9 - CRYPTO_HASH = 10 - DATE_SHIFT = 12 - CRYPTO_DETERMINISTIC_CONFIG = 13 - REDACT_IMAGE = 14 - - -class RelationalOperator(proto.Enum): - r"""Operators available for comparing the value of fields. - - Values: - RELATIONAL_OPERATOR_UNSPECIFIED (0): - Unused - EQUAL_TO (1): - Equal. Attempts to match even with - incompatible types. - NOT_EQUAL_TO (2): - Not equal to. Attempts to match even with - incompatible types. - GREATER_THAN (3): - Greater than. - LESS_THAN (4): - Less than. - GREATER_THAN_OR_EQUALS (5): - Greater than or equals. - LESS_THAN_OR_EQUALS (6): - Less than or equals. 
- EXISTS (7): - Exists - """ - RELATIONAL_OPERATOR_UNSPECIFIED = 0 - EQUAL_TO = 1 - NOT_EQUAL_TO = 2 - GREATER_THAN = 3 - LESS_THAN = 4 - GREATER_THAN_OR_EQUALS = 5 - LESS_THAN_OR_EQUALS = 6 - EXISTS = 7 - - -class MatchingType(proto.Enum): - r"""Type of the match which can be applied to different ways of - matching, like Dictionary, regular expression and intersecting - with findings of another info type. - - Values: - MATCHING_TYPE_UNSPECIFIED (0): - Invalid. - MATCHING_TYPE_FULL_MATCH (1): - Full match. - - Dictionary: join of Dictionary results matched - complete finding quote - Regex: all regex - matches fill a finding quote start to end - - Exclude info type: completely inside affecting - info types findings - MATCHING_TYPE_PARTIAL_MATCH (2): - Partial match. - - Dictionary: at least one of the tokens in the - finding matches - Regex: substring of the - finding matches - - Exclude info type: intersects with affecting - info types findings - MATCHING_TYPE_INVERSE_MATCH (3): - Inverse match. - - Dictionary: no tokens in the finding match the - dictionary - Regex: finding doesn't match the - regex - - Exclude info type: no intersection with - affecting info types findings - """ - MATCHING_TYPE_UNSPECIFIED = 0 - MATCHING_TYPE_FULL_MATCH = 1 - MATCHING_TYPE_PARTIAL_MATCH = 2 - MATCHING_TYPE_INVERSE_MATCH = 3 - - -class ContentOption(proto.Enum): - r"""Deprecated and unused. - - Values: - CONTENT_UNSPECIFIED (0): - Includes entire content of a file or a data - stream. - CONTENT_TEXT (1): - Text content within the data, excluding any - metadata. - CONTENT_IMAGE (2): - Images found in the data. - """ - CONTENT_UNSPECIFIED = 0 - CONTENT_TEXT = 1 - CONTENT_IMAGE = 2 - - -class MetadataType(proto.Enum): - r"""Type of metadata containing the finding. - - Values: - METADATATYPE_UNSPECIFIED (0): - Unused - STORAGE_METADATA (2): - General file metadata provided by Cloud - Storage. 
- """ - METADATATYPE_UNSPECIFIED = 0 - STORAGE_METADATA = 2 - - -class InfoTypeSupportedBy(proto.Enum): - r"""Parts of the APIs which use certain infoTypes. - - Values: - ENUM_TYPE_UNSPECIFIED (0): - Unused. - INSPECT (1): - Supported by the inspect operations. - RISK_ANALYSIS (2): - Supported by the risk analysis operations. - """ - ENUM_TYPE_UNSPECIFIED = 0 - INSPECT = 1 - RISK_ANALYSIS = 2 - - -class DlpJobType(proto.Enum): - r"""An enum to represent the various types of DLP jobs. - - Values: - DLP_JOB_TYPE_UNSPECIFIED (0): - Defaults to INSPECT_JOB. - INSPECT_JOB (1): - The job inspected Google Cloud for sensitive - data. - RISK_ANALYSIS_JOB (2): - The job executed a Risk Analysis computation. - """ - DLP_JOB_TYPE_UNSPECIFIED = 0 - INSPECT_JOB = 1 - RISK_ANALYSIS_JOB = 2 - - -class StoredInfoTypeState(proto.Enum): - r"""State of a StoredInfoType version. - - Values: - STORED_INFO_TYPE_STATE_UNSPECIFIED (0): - Unused - PENDING (1): - StoredInfoType version is being created. - READY (2): - StoredInfoType version is ready for use. - FAILED (3): - StoredInfoType creation failed. All relevant error messages - are returned in the ``StoredInfoTypeVersion`` message. - INVALID (4): - StoredInfoType is no longer valid because artifacts stored - in user-controlled storage were modified. To fix an invalid - StoredInfoType, use the ``UpdateStoredInfoType`` method to - create a new version. - """ - STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 - PENDING = 1 - READY = 2 - FAILED = 3 - INVALID = 4 - - -class ResourceVisibility(proto.Enum): - r"""How broadly a resource has been shared. New items may be - added over time. A higher number means more restricted. - - Values: - RESOURCE_VISIBILITY_UNSPECIFIED (0): - Unused. - RESOURCE_VISIBILITY_PUBLIC (10): - Visible to any user. - RESOURCE_VISIBILITY_RESTRICTED (20): - Visible only to specific users. 
- """ - RESOURCE_VISIBILITY_UNSPECIFIED = 0 - RESOURCE_VISIBILITY_PUBLIC = 10 - RESOURCE_VISIBILITY_RESTRICTED = 20 - - -class EncryptionStatus(proto.Enum): - r"""How a resource is encrypted. - - Values: - ENCRYPTION_STATUS_UNSPECIFIED (0): - Unused. - ENCRYPTION_GOOGLE_MANAGED (1): - Google manages server-side encryption keys on - your behalf. - ENCRYPTION_CUSTOMER_MANAGED (2): - Customer provides the key. - """ - ENCRYPTION_STATUS_UNSPECIFIED = 0 - ENCRYPTION_GOOGLE_MANAGED = 1 - ENCRYPTION_CUSTOMER_MANAGED = 2 - - -class ExcludeInfoTypes(proto.Message): - r"""List of excluded infoTypes. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - InfoType list in ExclusionRule rule drops a finding when it - overlaps or contained within with a finding of an infoType - from this list. For example, for - ``InspectionRuleSet.info_types`` containing - "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` - with "EMAIL_ADDRESS" the phone number findings are dropped - if they overlap with EMAIL_ADDRESS finding. That leads to - "555-222-2222@example.org" to generate only a single - finding, namely email address. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - - -class ExcludeByHotword(proto.Message): - r"""The rule to exclude findings based on a hotword. For record - inspection of tables, column names are considered hotwords. An - example of this is to exclude a finding if a BigQuery column - matches a specific pattern. - - Attributes: - hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression pattern defining what - qualifies as a hotword. - proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): - Range of characters within which the entire - hotword must reside. The total length of the - window cannot exceed 1000 characters. 
The - windowBefore property in proximity should be set - to 1 if the hotword needs to be included in a - column header. - """ - - hotword_regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=1, - message=storage.CustomInfoType.Regex, - ) - proximity: storage.CustomInfoType.DetectionRule.Proximity = proto.Field( - proto.MESSAGE, - number=2, - message=storage.CustomInfoType.DetectionRule.Proximity, - ) - - -class ExclusionRule(proto.Message): - r"""The rule that specifies conditions when findings of infoTypes - specified in ``InspectionRuleSet`` are removed from results. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - Dictionary which defines the rule. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression which defines the rule. - - This field is a member of `oneof`_ ``type``. - exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes): - Set of infoTypes for which findings would - affect this rule. - - This field is a member of `oneof`_ ``type``. - exclude_by_hotword (google.cloud.dlp_v2.types.ExcludeByHotword): - Drop if the hotword rule is contained in the - proximate context. For tabular data, the context - includes the column name. - - This field is a member of `oneof`_ ``type``. - matching_type (google.cloud.dlp_v2.types.MatchingType): - How the rule is applied, see MatchingType - documentation for details. 
- """ - - dictionary: storage.CustomInfoType.Dictionary = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - exclude_info_types: 'ExcludeInfoTypes' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='ExcludeInfoTypes', - ) - exclude_by_hotword: 'ExcludeByHotword' = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message='ExcludeByHotword', - ) - matching_type: 'MatchingType' = proto.Field( - proto.ENUM, - number=4, - enum='MatchingType', - ) - - -class InspectionRule(proto.Message): - r"""A single inspection rule to be applied to infoTypes, specified in - ``InspectionRuleSet``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - - This field is a member of `oneof`_ ``type``. - exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): - Exclusion rule. - - This field is a member of `oneof`_ ``type``. - """ - - hotword_rule: storage.CustomInfoType.DetectionRule.HotwordRule = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.DetectionRule.HotwordRule, - ) - exclusion_rule: 'ExclusionRule' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='ExclusionRule', - ) - - -class InspectionRuleSet(proto.Message): - r"""Rule set for modifying a set of infoTypes to alter behavior - under certain circumstances, depending on the specific details - of the rules within the set. 
- - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - List of infoTypes this rule set is applied - to. - rules (MutableSequence[google.cloud.dlp_v2.types.InspectionRule]): - Set of rules to be applied to infoTypes. The - rules are applied in order. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - rules: MutableSequence['InspectionRule'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='InspectionRule', - ) - - -class InspectConfig(proto.Message): - r"""Configuration description of the scanning process. When used with - redactContent only info_types and min_likelihood are currently used. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - Restricts what info_types to look for. The values must - correspond to InfoType values returned by ListInfoTypes or - listed at - https://cloud.google.com/dlp/docs/infotypes-reference. - - When no InfoTypes or CustomInfoTypes are specified in a - request, the system may automatically choose what detectors - to run. By default this may be all types, but may change - over time as detectors are updated. - - If you need precise control and predictability as to what - detectors are run you should specify specific InfoTypes - listed in the reference, otherwise a default list will be - used, which may change over time. - min_likelihood (google.cloud.dlp_v2.types.Likelihood): - Only returns findings equal or above this - threshold. The default is POSSIBLE. - See https://cloud.google.com/dlp/docs/likelihood - to learn more. - limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): - Configuration to control the number of findings returned. - This is not used for data profiling. - - When redacting sensitive data from images, finding limits - don't apply. They can cause unexpected or inconsistent - results, where only some data is redacted. 
Don't include - finding limits in - [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] - requests. Otherwise, Cloud DLP returns an error. - include_quote (bool): - When true, a contextual quote from the data that triggered a - finding is included in the response; see - [Finding.quote][google.privacy.dlp.v2.Finding.quote]. This - is not used for data profiling. - exclude_info_types (bool): - When true, excludes type information of the - findings. This is not used for data profiling. - custom_info_types (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType]): - CustomInfoTypes provided by the user. See - https://cloud.google.com/dlp/docs/creating-custom-infotypes - to learn more. - content_options (MutableSequence[google.cloud.dlp_v2.types.ContentOption]): - Deprecated and unused. - rule_set (MutableSequence[google.cloud.dlp_v2.types.InspectionRuleSet]): - Set of rules to apply to the findings for - this InspectConfig. Exclusion rules, contained - in the set are executed in the end, other rules - are executed in the order they are specified for - each info type. - """ - - class FindingLimits(proto.Message): - r"""Configuration to control the number of findings returned for - inspection. This is not used for de-identification or data - profiling. - - When redacting sensitive data from images, finding limits don't - apply. They can cause unexpected or inconsistent results, where only - some data is redacted. Don't include finding limits in - [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] - requests. Otherwise, Cloud DLP returns an error. - - Attributes: - max_findings_per_item (int): - Max number of findings that will be returned for each item - scanned. When set within ``InspectJobConfig``, the maximum - returned is 2000 regardless if this is set higher. When set - within ``InspectContentRequest``, this field is ignored. - max_findings_per_request (int): - Max number of findings that will be returned per - request/job. 
When set within ``InspectContentRequest``, the - maximum returned is 2000 regardless if this is set higher. - max_findings_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): - Configuration of findings limit given for - specified infoTypes. - """ - - class InfoTypeLimit(proto.Message): - r"""Max findings configuration per infoType, per content item or - long running DlpJob. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Type of information the findings limit applies to. Only one - limit per info_type should be provided. If InfoTypeLimit - does not have an info_type, the DLP API applies the limit - against all info_types that are found but not specified in - another InfoTypeLimit. - max_findings (int): - Max findings limit for the given infoType. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - max_findings: int = proto.Field( - proto.INT32, - number=2, - ) - - max_findings_per_item: int = proto.Field( - proto.INT32, - number=1, - ) - max_findings_per_request: int = proto.Field( - proto.INT32, - number=2, - ) - max_findings_per_info_type: MutableSequence['InspectConfig.FindingLimits.InfoTypeLimit'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='InspectConfig.FindingLimits.InfoTypeLimit', - ) - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - min_likelihood: storage.Likelihood = proto.Field( - proto.ENUM, - number=2, - enum=storage.Likelihood, - ) - limits: FindingLimits = proto.Field( - proto.MESSAGE, - number=3, - message=FindingLimits, - ) - include_quote: bool = proto.Field( - proto.BOOL, - number=4, - ) - exclude_info_types: bool = proto.Field( - proto.BOOL, - number=5, - ) - custom_info_types: MutableSequence[storage.CustomInfoType] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=storage.CustomInfoType, - ) - 
content_options: MutableSequence['ContentOption'] = proto.RepeatedField( - proto.ENUM, - number=8, - enum='ContentOption', - ) - rule_set: MutableSequence['InspectionRuleSet'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='InspectionRuleSet', - ) - - -class ByteContentItem(proto.Message): - r"""Container for bytes to inspect or redact. - - Attributes: - type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): - The type of data stored in the bytes string. Default will be - TEXT_UTF8. - data (bytes): - Content data to inspect or redact. - """ - class BytesType(proto.Enum): - r"""The type of data being sent for inspection. To learn more, see - `Supported file - types `__. - - Values: - BYTES_TYPE_UNSPECIFIED (0): - Unused - IMAGE (6): - Any image type. - IMAGE_JPEG (1): - jpeg - IMAGE_BMP (2): - bmp - IMAGE_PNG (3): - png - IMAGE_SVG (4): - svg - TEXT_UTF8 (5): - plain text - WORD_DOCUMENT (7): - docx, docm, dotx, dotm - PDF (8): - pdf - POWERPOINT_DOCUMENT (9): - pptx, pptm, potx, potm, pot - EXCEL_DOCUMENT (10): - xlsx, xlsm, xltx, xltm - AVRO (11): - avro - CSV (12): - csv - TSV (13): - tsv - """ - BYTES_TYPE_UNSPECIFIED = 0 - IMAGE = 6 - IMAGE_JPEG = 1 - IMAGE_BMP = 2 - IMAGE_PNG = 3 - IMAGE_SVG = 4 - TEXT_UTF8 = 5 - WORD_DOCUMENT = 7 - PDF = 8 - POWERPOINT_DOCUMENT = 9 - EXCEL_DOCUMENT = 10 - AVRO = 11 - CSV = 12 - TSV = 13 - - type_: BytesType = proto.Field( - proto.ENUM, - number=1, - enum=BytesType, - ) - data: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class ContentItem(proto.Message): - r""" - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - value (str): - String data to inspect or redact. 
- - This field is a member of `oneof`_ ``data_item``. - table (google.cloud.dlp_v2.types.Table): - Structured content for inspection. See - https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table - to learn more. - - This field is a member of `oneof`_ ``data_item``. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - Content data to inspect or redact. Replaces ``type`` and - ``data``. - - This field is a member of `oneof`_ ``data_item``. - """ - - value: str = proto.Field( - proto.STRING, - number=3, - oneof='data_item', - ) - table: 'Table' = proto.Field( - proto.MESSAGE, - number=4, - oneof='data_item', - message='Table', - ) - byte_item: 'ByteContentItem' = proto.Field( - proto.MESSAGE, - number=5, - oneof='data_item', - message='ByteContentItem', - ) - - -class Table(proto.Message): - r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request - allowed. See - https://cloud.google.com/dlp/docs/inspecting-structured-text#inspecting_a_table - to learn more. - - Attributes: - headers (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Headers of the table. - rows (MutableSequence[google.cloud.dlp_v2.types.Table.Row]): - Rows of the table. - """ - - class Row(proto.Message): - r"""Values of the row. - - Attributes: - values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Individual cells. - """ - - values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - - headers: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - rows: MutableSequence[Row] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Row, - ) - - -class InspectResult(proto.Message): - r"""All the findings for a single scanned item. - - Attributes: - findings (MutableSequence[google.cloud.dlp_v2.types.Finding]): - List of findings for an item. 
- findings_truncated (bool): - If true, then this item might have more - findings than were returned, and the findings - returned are an arbitrary subset of all - findings. The findings list might be truncated - because the input items were too large, or - because the server reached the maximum amount of - resources allowed for a single API call. For - best results, divide the input into smaller - batches. - """ - - findings: MutableSequence['Finding'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Finding', - ) - findings_truncated: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class Finding(proto.Message): - r"""Represents a piece of potentially sensitive content. - - Attributes: - name (str): - Resource name in format - projects/{project}/locations/{location}/findings/{finding} - Populated only when viewing persisted findings. - quote (str): - The content that was found. Even if the content is not - textual, it may be converted to a textual representation - here. Provided if ``include_quote`` is true and the finding - is less than or equal to 4096 bytes long. If the finding - exceeds 4096 bytes in length, the quote may be omitted. - info_type (google.cloud.dlp_v2.types.InfoType): - The type of content that might have been found. Provided if - ``excluded_types`` is false. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Confidence of how likely it is that the ``info_type`` is - correct. - location (google.cloud.dlp_v2.types.Location): - Where the content was found. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when finding was detected. - quote_info (google.cloud.dlp_v2.types.QuoteInfo): - Contains data parsed from quotes. Only populated if - include_quote was set to true and a supported infoType was - requested. Currently supported infoTypes: DATE, - DATE_OF_BIRTH and TIME. - resource_name (str): - The job that stored the finding. - trigger_name (str): - Job trigger name, if applicable, for this - finding. 
- labels (MutableMapping[str, str]): - The labels associated with this ``Finding``. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - job_create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the job started that produced this - finding. - job_name (str): - The job that stored the finding. - finding_id (str): - The unique finding id. - """ - - name: str = proto.Field( - proto.STRING, - number=14, - ) - quote: str = proto.Field( - proto.STRING, - number=1, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - likelihood: storage.Likelihood = proto.Field( - proto.ENUM, - number=3, - enum=storage.Likelihood, - ) - location: 'Location' = proto.Field( - proto.MESSAGE, - number=4, - message='Location', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - quote_info: 'QuoteInfo' = proto.Field( - proto.MESSAGE, - number=7, - message='QuoteInfo', - ) - resource_name: str = proto.Field( - proto.STRING, - number=8, - ) - trigger_name: str = proto.Field( - proto.STRING, - number=9, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - job_create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - job_name: str = proto.Field( - proto.STRING, - number=13, - ) - finding_id: str = proto.Field( - proto.STRING, - number=15, - ) - - -class Location(proto.Message): - r"""Specifies the location of the finding. 
- - Attributes: - byte_range (google.cloud.dlp_v2.types.Range): - Zero-based byte offsets delimiting the - finding. These are relative to the finding's - containing element. Note that when the content - is not textual, this references the UTF-8 - encoded textual representation of the content. - Omitted if content is an image. - codepoint_range (google.cloud.dlp_v2.types.Range): - Unicode character offsets delimiting the - finding. These are relative to the finding's - containing element. Provided when the content is - text. - content_locations (MutableSequence[google.cloud.dlp_v2.types.ContentLocation]): - List of nested objects pointing to the - precise location of the finding within the file - or record. - container (google.cloud.dlp_v2.types.Container): - Information about the container where this - finding occurred, if available. - """ - - byte_range: 'Range' = proto.Field( - proto.MESSAGE, - number=1, - message='Range', - ) - codepoint_range: 'Range' = proto.Field( - proto.MESSAGE, - number=2, - message='Range', - ) - content_locations: MutableSequence['ContentLocation'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ContentLocation', - ) - container: 'Container' = proto.Field( - proto.MESSAGE, - number=8, - message='Container', - ) - - -class ContentLocation(proto.Message): - r"""Precise location of the finding within a document, record, - image, or metadata container. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - container_name (str): - Name of the container where the finding is located. The top - level name is the source file name or table name. 
Names of - some common storage containers are formatted as follows: - - - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` - - Cloud Storage files: ``gs://{bucket}/{path}`` - - Datastore namespace: {namespace} - - Nested names could be absent if the embedded object has no - string identifier (for example, an image contained within a - document). - record_location (google.cloud.dlp_v2.types.RecordLocation): - Location within a row or record of a database - table. - - This field is a member of `oneof`_ ``location``. - image_location (google.cloud.dlp_v2.types.ImageLocation): - Location within an image's pixels. - - This field is a member of `oneof`_ ``location``. - document_location (google.cloud.dlp_v2.types.DocumentLocation): - Location data for document files. - - This field is a member of `oneof`_ ``location``. - metadata_location (google.cloud.dlp_v2.types.MetadataLocation): - Location within the metadata for inspected - content. - - This field is a member of `oneof`_ ``location``. - container_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Finding container modification timestamp, if applicable. For - Cloud Storage, this field contains the last file - modification timestamp. For a BigQuery table, this field - contains the last_modified_time property. For Datastore, - this field isn't populated. - container_version (str): - Finding container version, if available - ("generation" for Cloud Storage). 
- """ - - container_name: str = proto.Field( - proto.STRING, - number=1, - ) - record_location: 'RecordLocation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location', - message='RecordLocation', - ) - image_location: 'ImageLocation' = proto.Field( - proto.MESSAGE, - number=3, - oneof='location', - message='ImageLocation', - ) - document_location: 'DocumentLocation' = proto.Field( - proto.MESSAGE, - number=5, - oneof='location', - message='DocumentLocation', - ) - metadata_location: 'MetadataLocation' = proto.Field( - proto.MESSAGE, - number=8, - oneof='location', - message='MetadataLocation', - ) - container_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - container_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class MetadataLocation(proto.Message): - r"""Metadata Location - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.dlp_v2.types.MetadataType): - Type of metadata containing the finding. - storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): - Storage metadata. - - This field is a member of `oneof`_ ``label``. - """ - - type_: 'MetadataType' = proto.Field( - proto.ENUM, - number=1, - enum='MetadataType', - ) - storage_label: 'StorageMetadataLabel' = proto.Field( - proto.MESSAGE, - number=3, - oneof='label', - message='StorageMetadataLabel', - ) - - -class StorageMetadataLabel(proto.Message): - r"""Storage metadata label to indicate which metadata entry - contains findings. - - Attributes: - key (str): - - """ - - key: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DocumentLocation(proto.Message): - r"""Location of a finding within a document. - - Attributes: - file_offset (int): - Offset of the line, from the beginning of the - file, where the finding is located. 
- """ - - file_offset: int = proto.Field( - proto.INT64, - number=1, - ) - - -class RecordLocation(proto.Message): - r"""Location of a finding within a row or record. - - Attributes: - record_key (google.cloud.dlp_v2.types.RecordKey): - Key of the finding. - field_id (google.cloud.dlp_v2.types.FieldId): - Field id of the field containing the finding. - table_location (google.cloud.dlp_v2.types.TableLocation): - Location within a ``ContentItem.Table``. - """ - - record_key: storage.RecordKey = proto.Field( - proto.MESSAGE, - number=1, - message=storage.RecordKey, - ) - field_id: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - table_location: 'TableLocation' = proto.Field( - proto.MESSAGE, - number=3, - message='TableLocation', - ) - - -class TableLocation(proto.Message): - r"""Location of a finding within a table. - - Attributes: - row_index (int): - The zero-based index of the row where the finding is - located. Only populated for resources that have a natural - ordering, not BigQuery. In BigQuery, to identify the row a - finding came from, populate - BigQueryOptions.identifying_fields with your primary key - column names and when you store the findings the value of - those columns will be stored inside of Finding. - """ - - row_index: int = proto.Field( - proto.INT64, - number=1, - ) - - -class Container(proto.Message): - r"""Represents a container that may contain DLP findings. - Examples of a container include a file, table, or database - record. - - Attributes: - type_ (str): - Container type, for example BigQuery or Cloud - Storage. - project_id (str): - Project where the finding was found. - Can be different from the project that owns the - finding. - full_path (str): - A string representation of the full container - name. Examples: - - BigQuery: 'Project:DataSetId.TableId' - - Cloud Storage: - 'gs://Bucket/folders/filename.txt' - root_path (str): - The root of the container. 
Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the root is ``dataset_id`` - - For Cloud Storage file - ``gs://bucket/folder/filename.txt``, the root is - ``gs://bucket`` - relative_path (str): - The rest of the path after the root. Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the relative path is ``table_id`` - - For Cloud Storage file - ``gs://bucket/folder/filename.txt``, the relative path is - ``folder/filename.txt`` - update_time (google.protobuf.timestamp_pb2.Timestamp): - Findings container modification timestamp, if applicable. - For Cloud Storage, this field contains the last file - modification timestamp. For a BigQuery table, this field - contains the last_modified_time property. For Datastore, - this field isn't populated. - version (str): - Findings container version, if available - ("generation" for Cloud Storage). - """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - full_path: str = proto.Field( - proto.STRING, - number=3, - ) - root_path: str = proto.Field( - proto.STRING, - number=4, - ) - relative_path: str = proto.Field( - proto.STRING, - number=5, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class Range(proto.Message): - r"""Generic half-open interval [start, end) - - Attributes: - start (int): - Index of the first character of the range - (inclusive). - end (int): - Index of the last character of the range - (exclusive). - """ - - start: int = proto.Field( - proto.INT64, - number=1, - ) - end: int = proto.Field( - proto.INT64, - number=2, - ) - - -class ImageLocation(proto.Message): - r"""Location of the finding within an image. 
- - Attributes: - bounding_boxes (MutableSequence[google.cloud.dlp_v2.types.BoundingBox]): - Bounding boxes locating the pixels within the - image containing the finding. - """ - - bounding_boxes: MutableSequence['BoundingBox'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BoundingBox', - ) - - -class BoundingBox(proto.Message): - r"""Bounding box encompassing detected text within an image. - - Attributes: - top (int): - Top coordinate of the bounding box. (0,0) is - upper left. - left (int): - Left coordinate of the bounding box. (0,0) is - upper left. - width (int): - Width of the bounding box in pixels. - height (int): - Height of the bounding box in pixels. - """ - - top: int = proto.Field( - proto.INT32, - number=1, - ) - left: int = proto.Field( - proto.INT32, - number=2, - ) - width: int = proto.Field( - proto.INT32, - number=3, - ) - height: int = proto.Field( - proto.INT32, - number=4, - ) - - -class RedactImageRequest(proto.Message): - r"""Request to search for potentially sensitive info in an image - and redact it by covering it with a colored rectangle. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - location_id (str): - Deprecated. This field has no effect. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. 
- image_redaction_configs (MutableSequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): - The configuration for specifying what content - to redact from images. - include_findings (bool): - Whether the response should include findings - along with the redacted image. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - The content must be PNG, JPEG, SVG or BMP. - """ - - class ImageRedactionConfig(proto.Message): - r"""Configuration for determining how redaction of images should - occur. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Only one per info_type should be provided per request. If - not specified, and redact_all_text is false, the DLP API - will redact all text that it matches against all info_types - that are found, but not specified in another - ImageRedactionConfig. - - This field is a member of `oneof`_ ``target``. - redact_all_text (bool): - If true, all text found in the image, regardless whether it - matches an info_type, is redacted. Only one should be - provided. - - This field is a member of `oneof`_ ``target``. - redaction_color (google.cloud.dlp_v2.types.Color): - The color to use when redacting content from - an image. If not specified, the default is - black. 
- """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - oneof='target', - message=storage.InfoType, - ) - redact_all_text: bool = proto.Field( - proto.BOOL, - number=2, - oneof='target', - ) - redaction_color: 'Color' = proto.Field( - proto.MESSAGE, - number=3, - message='Color', - ) - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - location_id: str = proto.Field( - proto.STRING, - number=8, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - image_redaction_configs: MutableSequence[ImageRedactionConfig] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=ImageRedactionConfig, - ) - include_findings: bool = proto.Field( - proto.BOOL, - number=6, - ) - byte_item: 'ByteContentItem' = proto.Field( - proto.MESSAGE, - number=7, - message='ByteContentItem', - ) - - -class Color(proto.Message): - r"""Represents a color in the RGB color space. - - Attributes: - red (float): - The amount of red in the color as a value in the interval - [0, 1]. - green (float): - The amount of green in the color as a value in the interval - [0, 1]. - blue (float): - The amount of blue in the color as a value in the interval - [0, 1]. - """ - - red: float = proto.Field( - proto.FLOAT, - number=1, - ) - green: float = proto.Field( - proto.FLOAT, - number=2, - ) - blue: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class RedactImageResponse(proto.Message): - r"""Results of redacting an image. - - Attributes: - redacted_image (bytes): - The redacted image. The type will be the same - as the original image. - extracted_text (str): - If an image was being inspected and the InspectConfig's - include_quote was set to true, then this field will include - all text, if any, that was found in the image. - inspect_result (google.cloud.dlp_v2.types.InspectResult): - The findings. Populated when include_findings in the request - is true. 
- """ - - redacted_image: bytes = proto.Field( - proto.BYTES, - number=1, - ) - extracted_text: str = proto.Field( - proto.STRING, - number=2, - ) - inspect_result: 'InspectResult' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectResult', - ) - - -class DeidentifyContentRequest(proto.Message): - r"""Request to de-identify a ContentItem. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the de-identification of the content item. - Items specified here will override the template referenced - by the deidentify_template_name argument. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. Items specified here will - override the template referenced by the - inspect_template_name argument. - item (google.cloud.dlp_v2.types.ContentItem): - The item to de-identify. Will be treated as text. - - This value must be of type - [Table][google.privacy.dlp.v2.Table] if your - [deidentify_config][google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config] - is a - [RecordTransformations][google.privacy.dlp.v2.RecordTransformations] - object. - inspect_template_name (str): - Template to use. Any configuration directly specified in - inspect_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. 
Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - deidentify_template_name (str): - Template to use. Any configuration directly specified in - deidentify_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyConfig', - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=4, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=5, - ) - deidentify_template_name: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class DeidentifyContentResponse(proto.Message): - r"""Results of de-identifying a ContentItem. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The de-identified item. - overview (google.cloud.dlp_v2.types.TransformationOverview): - An overview of the changes that were made on the ``item``. - """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - overview: 'TransformationOverview' = proto.Field( - proto.MESSAGE, - number=2, - message='TransformationOverview', - ) - - -class ReidentifyContentRequest(proto.Message): - r"""Request to re-identify an item. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the re-identification of the content item. - This field shares the same proto message type that is used - for de-identification, however its usage here is for the - reversal of the previous de-identification. - Re-identification is performed by examining the - transformations used to de-identify the items and executing - the reverse. This requires that only reversible - transformations be provided here. The reversible - transformations are: - - - ``CryptoDeterministicConfig`` - - ``CryptoReplaceFfxFpeConfig`` - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. - item (google.cloud.dlp_v2.types.ContentItem): - The item to re-identify. Will be treated as - text. - inspect_template_name (str): - Template to use. Any configuration directly specified in - ``inspect_config`` will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - reidentify_template_name (str): - Template to use. References an instance of - ``DeidentifyTemplate``. Any configuration directly specified - in ``reidentify_config`` or ``inspect_config`` will override - those set in the template. 
The ``DeidentifyTemplate`` used - must include only reversible transformations. Singular - fields that are set in this request will replace their - corresponding fields in the template. Repeated fields are - appended. Singular sub-messages and groups are recursively - merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - reidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyConfig', - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=4, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=5, - ) - reidentify_template_name: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ReidentifyContentResponse(proto.Message): - r"""Results of re-identifying an item. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The re-identified item. - overview (google.cloud.dlp_v2.types.TransformationOverview): - An overview of the changes that were made to the ``item``. - """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - overview: 'TransformationOverview' = proto.Field( - proto.MESSAGE, - number=2, - message='TransformationOverview', - ) - - -class InspectContentRequest(proto.Message): - r"""Request to search for potentially sensitive info in a - ContentItem. - - Attributes: - parent (str): - Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. What specified here will - override the template referenced by the - inspect_template_name argument. - item (google.cloud.dlp_v2.types.ContentItem): - The item to inspect. - inspect_template_name (str): - Template to use. Any configuration directly specified in - inspect_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class InspectContentResponse(proto.Message): - r"""Results of inspecting an item. - - Attributes: - result (google.cloud.dlp_v2.types.InspectResult): - The findings. 
- """ - - result: 'InspectResult' = proto.Field( - proto.MESSAGE, - number=1, - message='InspectResult', - ) - - -class OutputStorageConfig(proto.Message): - r"""Cloud repository for storing output. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Store findings in an existing table or a new table in an - existing dataset. If table_id is not set a new one will be - generated for you with the following format: - dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific time zone - will be used for generating the date details. - - For Inspect, each column in an existing output table must - have the same name, type, and mode of a field in the - ``Finding`` object. - - For Risk, an existing output table should be the output of a - previous Risk analysis job run on the same source table, - with the same privacy metric and quasi-identifiers. Risk - jobs that analyze the same table but compute a different - privacy metric, or use different sets of quasi-identifiers, - cannot store their results in the same table. - - This field is a member of `oneof`_ ``type``. - output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): - Schema used for writing the findings for Inspect jobs. This - field is only used for Inspect and must be unspecified for - Risk jobs. Columns are derived from the ``Finding`` object. - If appending to an existing table, any columns from the - predefined schema that are missing will be added. No columns - in the existing table will be deleted. - - If unspecified, then all available columns will be used for - a new table or an (existing) table with no schema, and no - changes will be made to an existing table that has a schema. - Only for use with external storage. - """ - class OutputSchema(proto.Enum): - r"""Predefined schemas for storing findings. - Only for use with external storage. 
- - Values: - OUTPUT_SCHEMA_UNSPECIFIED (0): - Unused. - BASIC_COLUMNS (1): - Basic schema including only ``info_type``, ``quote``, - ``certainty``, and ``timestamp``. - GCS_COLUMNS (2): - Schema tailored to findings from scanning - Cloud Storage. - DATASTORE_COLUMNS (3): - Schema tailored to findings from scanning - Google Datastore. - BIG_QUERY_COLUMNS (4): - Schema tailored to findings from scanning - Google BigQuery. - ALL_COLUMNS (5): - Schema containing all columns. - """ - OUTPUT_SCHEMA_UNSPECIFIED = 0 - BASIC_COLUMNS = 1 - GCS_COLUMNS = 2 - DATASTORE_COLUMNS = 3 - BIG_QUERY_COLUMNS = 4 - ALL_COLUMNS = 5 - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.BigQueryTable, - ) - output_schema: OutputSchema = proto.Field( - proto.ENUM, - number=3, - enum=OutputSchema, - ) - - -class InfoTypeStats(proto.Message): - r"""Statistics regarding a specific InfoType. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The type of finding this stat is for. - count (int): - Number of findings for this infoType. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - -class InspectDataSourceDetails(proto.Message): - r"""The results of an inspect DataSource job. - - Attributes: - requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): - The configuration used for this job. - result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): - A summary of the outcome of this inspection - job. - """ - - class RequestedOptions(proto.Message): - r"""Snapshot of the inspection configuration. - - Attributes: - snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - If run with an InspectTemplate, a snapshot of - its state at the time of this run. - job_config (google.cloud.dlp_v2.types.InspectJobConfig): - Inspect config. 
- """ - - snapshot_inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - job_config: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectJobConfig', - ) - - class Result(proto.Message): - r"""All result fields mentioned below are updated while the job - is processing. - - Attributes: - processed_bytes (int): - Total size in bytes that were processed. - total_estimated_bytes (int): - Estimate of the number of bytes to process. - info_type_stats (MutableSequence[google.cloud.dlp_v2.types.InfoTypeStats]): - Statistics of how many instances of each info - type were found during inspect job. - hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): - Statistics related to the processing of - hybrid inspect. - """ - - processed_bytes: int = proto.Field( - proto.INT64, - number=1, - ) - total_estimated_bytes: int = proto.Field( - proto.INT64, - number=2, - ) - info_type_stats: MutableSequence['InfoTypeStats'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='InfoTypeStats', - ) - hybrid_stats: 'HybridInspectStatistics' = proto.Field( - proto.MESSAGE, - number=7, - message='HybridInspectStatistics', - ) - - requested_options: RequestedOptions = proto.Field( - proto.MESSAGE, - number=2, - message=RequestedOptions, - ) - result: Result = proto.Field( - proto.MESSAGE, - number=3, - message=Result, - ) - - -class HybridInspectStatistics(proto.Message): - r"""Statistics related to processing hybrid inspect requests. - - Attributes: - processed_count (int): - The number of hybrid inspection requests - processed within this job. - aborted_count (int): - The number of hybrid inspection requests - aborted because the job ran out of quota or was - ended before they could be processed. - pending_count (int): - The number of hybrid requests currently being processed. - Only populated when called via method ``getDlpJob``. 
A burst - of traffic may cause hybrid inspect requests to be enqueued. - Processing will take place as quickly as possible, but - resource limitations may impact how long a request is - enqueued for. - """ - - processed_count: int = proto.Field( - proto.INT64, - number=1, - ) - aborted_count: int = proto.Field( - proto.INT64, - number=2, - ) - pending_count: int = proto.Field( - proto.INT64, - number=3, - ) - - -class InfoTypeDescription(proto.Message): - r"""InfoType description. - - Attributes: - name (str): - Internal name of the infoType. - display_name (str): - Human readable form of the infoType name. - supported_by (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): - Which parts of the API supports this - InfoType. - description (str): - Description of the infotype. Translated when - language is provided in the request. - versions (MutableSequence[google.cloud.dlp_v2.types.VersionDescription]): - A list of available versions for the - infotype. - categories (MutableSequence[google.cloud.dlp_v2.types.InfoTypeCategory]): - The category of the infoType. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - supported_by: MutableSequence['InfoTypeSupportedBy'] = proto.RepeatedField( - proto.ENUM, - number=3, - enum='InfoTypeSupportedBy', - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - versions: MutableSequence['VersionDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='VersionDescription', - ) - categories: MutableSequence['InfoTypeCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='InfoTypeCategory', - ) - - -class InfoTypeCategory(proto.Message): - r"""Classification of infoTypes to organize them according to - geographic location, industry, and data type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - location_category (google.cloud.dlp_v2.types.InfoTypeCategory.LocationCategory): - The region or country that issued the ID or - document represented by the infoType. - - This field is a member of `oneof`_ ``category``. - industry_category (google.cloud.dlp_v2.types.InfoTypeCategory.IndustryCategory): - The group of relevant businesses where this - infoType is commonly used - - This field is a member of `oneof`_ ``category``. - type_category (google.cloud.dlp_v2.types.InfoTypeCategory.TypeCategory): - The class of identifiers where this infoType - belongs - - This field is a member of `oneof`_ ``category``. - """ - class LocationCategory(proto.Enum): - r"""Enum of the current locations. - We might add more locations in the future. - - Values: - LOCATION_UNSPECIFIED (0): - Unused location - GLOBAL (1): - The infoType is not issued by or tied to a - specific region, but is used almost everywhere. - ARGENTINA (2): - The infoType is typically used in Argentina. - AUSTRALIA (3): - The infoType is typically used in Australia. - BELGIUM (4): - The infoType is typically used in Belgium. - BRAZIL (5): - The infoType is typically used in Brazil. - CANADA (6): - The infoType is typically used in Canada. - CHILE (7): - The infoType is typically used in Chile. - CHINA (8): - The infoType is typically used in China. - COLOMBIA (9): - The infoType is typically used in Colombia. - DENMARK (10): - The infoType is typically used in Denmark. - FRANCE (11): - The infoType is typically used in France. - FINLAND (12): - The infoType is typically used in Finland. - GERMANY (13): - The infoType is typically used in Germany. - HONG_KONG (14): - The infoType is typically used in Hong Kong. - INDIA (15): - The infoType is typically used in India. 
- INDONESIA (16): - The infoType is typically used in Indonesia. - IRELAND (17): - The infoType is typically used in Ireland. - ISRAEL (18): - The infoType is typically used in Israel. - ITALY (19): - The infoType is typically used in Italy. - JAPAN (20): - The infoType is typically used in Japan. - KOREA (21): - The infoType is typically used in Korea. - MEXICO (22): - The infoType is typically used in Mexico. - THE_NETHERLANDS (23): - The infoType is typically used in the - Netherlands. - NORWAY (24): - The infoType is typically used in Norway. - PARAGUAY (25): - The infoType is typically used in Paraguay. - PERU (26): - The infoType is typically used in Peru. - POLAND (27): - The infoType is typically used in Poland. - PORTUGAL (28): - The infoType is typically used in Portugal. - SINGAPORE (29): - The infoType is typically used in Singapore. - SOUTH_AFRICA (30): - The infoType is typically used in South - Africa. - SPAIN (31): - The infoType is typically used in Spain. - SWEDEN (32): - The infoType is typically used in Sweden. - TAIWAN (33): - The infoType is typically used in Taiwan. - THAILAND (34): - The infoType is typically used in Thailand. - TURKEY (35): - The infoType is typically used in Turkey. - UNITED_KINGDOM (36): - The infoType is typically used in the United - Kingdom. - UNITED_STATES (37): - The infoType is typically used in the United - States. - URUGUAY (38): - The infoType is typically used in Uruguay. - VENEZUELA (39): - The infoType is typically used in Venezuela. - INTERNAL (40): - The infoType is typically used in Google - internally. - NEW_ZEALAND (41): - The infoType is typically used in New - Zealand. 
- """ - LOCATION_UNSPECIFIED = 0 - GLOBAL = 1 - ARGENTINA = 2 - AUSTRALIA = 3 - BELGIUM = 4 - BRAZIL = 5 - CANADA = 6 - CHILE = 7 - CHINA = 8 - COLOMBIA = 9 - DENMARK = 10 - FRANCE = 11 - FINLAND = 12 - GERMANY = 13 - HONG_KONG = 14 - INDIA = 15 - INDONESIA = 16 - IRELAND = 17 - ISRAEL = 18 - ITALY = 19 - JAPAN = 20 - KOREA = 21 - MEXICO = 22 - THE_NETHERLANDS = 23 - NORWAY = 24 - PARAGUAY = 25 - PERU = 26 - POLAND = 27 - PORTUGAL = 28 - SINGAPORE = 29 - SOUTH_AFRICA = 30 - SPAIN = 31 - SWEDEN = 32 - TAIWAN = 33 - THAILAND = 34 - TURKEY = 35 - UNITED_KINGDOM = 36 - UNITED_STATES = 37 - URUGUAY = 38 - VENEZUELA = 39 - INTERNAL = 40 - NEW_ZEALAND = 41 - - class IndustryCategory(proto.Enum): - r"""Enum of the current industries in the category. - We might add more industries in the future. - - Values: - INDUSTRY_UNSPECIFIED (0): - Unused industry - FINANCE (1): - The infoType is typically used in the finance - industry. - HEALTH (2): - The infoType is typically used in the health - industry. - TELECOMMUNICATIONS (3): - The infoType is typically used in the - telecommunications industry. - """ - INDUSTRY_UNSPECIFIED = 0 - FINANCE = 1 - HEALTH = 2 - TELECOMMUNICATIONS = 3 - - class TypeCategory(proto.Enum): - r"""Enum of the current types in the category. - We might add more types in the future. - - Values: - TYPE_UNSPECIFIED (0): - Unused type - PII (1): - Personally identifiable information, for - example, a name or phone number - SPII (2): - Personally identifiable information that is - especially sensitive, for example, a passport - number. - DEMOGRAPHIC (3): - Attributes that can partially identify - someone, especially in combination with other - attributes, like age, height, and gender. - CREDENTIAL (4): - Confidential or secret information, for - example, a password. - GOVERNMENT_ID (5): - An identification document issued by a - government. - DOCUMENT (6): - A document, for example, a resume or source - code. 
- CONTEXTUAL_INFORMATION (7): - Information that is not sensitive on its own, - but provides details about the circumstances - surrounding an entity or an event. - """ - TYPE_UNSPECIFIED = 0 - PII = 1 - SPII = 2 - DEMOGRAPHIC = 3 - CREDENTIAL = 4 - GOVERNMENT_ID = 5 - DOCUMENT = 6 - CONTEXTUAL_INFORMATION = 7 - - location_category: LocationCategory = proto.Field( - proto.ENUM, - number=1, - oneof='category', - enum=LocationCategory, - ) - industry_category: IndustryCategory = proto.Field( - proto.ENUM, - number=2, - oneof='category', - enum=IndustryCategory, - ) - type_category: TypeCategory = proto.Field( - proto.ENUM, - number=3, - oneof='category', - enum=TypeCategory, - ) - - -class VersionDescription(proto.Message): - r"""Details about each available version for an infotype. - - Attributes: - version (str): - Name of the version - description (str): - Description of the version. - """ - - version: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListInfoTypesRequest(proto.Message): - r"""Request for the list of infoTypes. - - Attributes: - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - language_code (str): - BCP-47 language code for localized infoType - friendly names. If omitted, or if localized - strings are not available, en-US strings will be - returned. - filter (str): - filter to only return infoTypes supported by certain parts - of the API. Defaults to supported_by=INSPECT. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - language_code: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - location_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListInfoTypesResponse(proto.Message): - r"""Response to the ListInfoTypes request. 
- - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeDescription]): - Set of sensitive infoTypes. - """ - - info_types: MutableSequence['InfoTypeDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InfoTypeDescription', - ) - - -class RiskAnalysisJobConfig(proto.Message): - r"""Configuration for a risk analysis job. See - https://cloud.google.com/dlp/docs/concepts-risk-analysis to - learn more. - - Attributes: - privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - actions (MutableSequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. Are executed in the order provided. - """ - - privacy_metric: 'PrivacyMetric' = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - source_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - actions: MutableSequence['Action'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Action', - ) - - -class QuasiId(proto.Message): - r"""A column with a semantic tag attached. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. - info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. 
To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - - This field is a member of `oneof`_ ``tag``. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - - This field is a member of `oneof`_ ``tag``. - inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - - This field is a member of `oneof`_ ``tag``. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred: empty_pb2.Empty = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - -class StatisticalTable(proto.Message): - r"""An auxiliary table containing statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. - If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). 
Null values are assumed to be zero. - """ - - class QuasiIdentifierField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=2, - ) - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids: MutableSequence[QuasiIdentifierField] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=QuasiIdentifierField, - ) - relative_frequency: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - -class PrivacyMetric(proto.Message): - r"""Privacy metric to compute for reidentification risk analysis. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): - Numerical stats - - This field is a member of `oneof`_ ``type``. - categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig): - Categorical stats - - This field is a member of `oneof`_ ``type``. - k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig): - K-anonymity - - This field is a member of `oneof`_ ``type``. 
- l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig): - l-diversity - - This field is a member of `oneof`_ ``type``. - k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig): - k-map - - This field is a member of `oneof`_ ``type``. - delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig): - delta-presence - - This field is a member of `oneof`_ ``type``. - """ - - class NumericalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - min, max, and quantiles. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute numerical stats on. - Supported types are integer, float, date, - datetime, timestamp, time. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class CategoricalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - number of distinct values and value count distribution. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute categorical stats on. All - column types are supported except for arrays and - structs. However, it may be more informative to - use NumericalStats when the field type is - supported, depending on the data. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class KAnonymityConfig(proto.Message): - r"""k-anonymity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Set of fields to compute k-anonymity over. - When multiple fields are specified, they are - considered a single composite key. Structs and - repeated data types are not supported; however, - nested fields are supported so long as they are - not structs themselves or nested within a - repeated field. 
- entity_id (google.cloud.dlp_v2.types.EntityId): - Message indicating that multiple rows might be associated to - a single individual. If the same entity_id is associated to - multiple quasi-identifier tuples over distinct rows, we - consider the entire collection of tuples as the composite - quasi-identifier. This collection is a multiset: the order - in which the different tuples appear in the dataset is - ignored, but their frequency is taken into account. - - Important note: a maximum of 1000 rows can be associated to - a single entity ID. If more rows are associated with the - same entity ID, some might be ignored. - """ - - quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - entity_id: storage.EntityId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.EntityId, - ) - - class LDiversityConfig(proto.Message): - r"""l-diversity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Set of quasi-identifiers indicating how - equivalence classes are defined for the - l-diversity computation. When multiple fields - are specified, they are considered a single - composite key. - sensitive_attribute (google.cloud.dlp_v2.types.FieldId): - Sensitive field for computing the l-value. - """ - - quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - sensitive_attribute: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - class KMapEstimationConfig(proto.Message): - r"""Reidentifiability metric. This corresponds to a risk model - similar to what is called "journalist risk" in the literature, - except the attack dataset is statistically modeled instead of - being perfectly known. 
This can be done using publicly available - data (like the US Census), or using a custom statistical model - (indicated as one or several BigQuery tables), or by - extrapolating from the distribution of values in the input - dataset. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): - Required. Fields considered to be - quasi-identifiers. No two columns can have the - same tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. - auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): - Several auxiliary tables can be used in the analysis. Each - custom_tag used to tag a quasi-identifiers column must - appear in exactly one column of one auxiliary table. - """ - - class TaggedField(proto.Message): - r"""A column with a semantic tag attached. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. - info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - - This field is a member of `oneof`_ ``tag``. - custom_tag (str): - A column can be tagged with a custom tag. 
In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - - This field is a member of `oneof`_ ``tag``. - inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - - This field is a member of `oneof`_ ``tag``. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred: empty_pb2.Empty = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - class AuxiliaryTable(proto.Message): - r"""An auxiliary table contains statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. - If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). Null values are assumed to be zero. - """ - - class QuasiIdField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. 
- - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A auxiliary field. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=2, - ) - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField', - ) - relative_frequency: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.TaggedField'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.TaggedField', - ) - region_code: str = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable', - ) - - class DeltaPresenceEstimationConfig(proto.Message): - r"""δ-presence metric, used to estimate how likely it is for an - attacker to figure out that one given individual appears in a - de-identified dataset. Similarly to the k-map metric, we cannot - compute δ-presence exactly without knowing the attack dataset, - so we use a statistical model instead. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.QuasiId]): - Required. Fields considered to be - quasi-identifiers. No two fields can have the - same tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. 
- auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable]): - Several auxiliary tables can be used in the analysis. Each - custom_tag used to tag a quasi-identifiers field must appear - in exactly one field of one auxiliary table. - """ - - quasi_ids: MutableSequence['QuasiId'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='QuasiId', - ) - region_code: str = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables: MutableSequence['StatisticalTable'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StatisticalTable', - ) - - numerical_stats_config: NumericalStatsConfig = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=NumericalStatsConfig, - ) - categorical_stats_config: CategoricalStatsConfig = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=CategoricalStatsConfig, - ) - k_anonymity_config: KAnonymityConfig = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=KAnonymityConfig, - ) - l_diversity_config: LDiversityConfig = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=LDiversityConfig, - ) - k_map_estimation_config: KMapEstimationConfig = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=KMapEstimationConfig, - ) - delta_presence_estimation_config: DeltaPresenceEstimationConfig = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=DeltaPresenceEstimationConfig, - ) - - -class AnalyzeDataSourceRiskDetails(proto.Message): - r"""Result of a risk analysis operation request. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - requested_source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult): - Numerical stats result - - This field is a member of `oneof`_ ``result``. - categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult): - Categorical stats result - - This field is a member of `oneof`_ ``result``. - k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult): - K-anonymity result - - This field is a member of `oneof`_ ``result``. - l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult): - L-divesity result - - This field is a member of `oneof`_ ``result``. - k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult): - K-map result - - This field is a member of `oneof`_ ``result``. - delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): - Delta-presence result - - This field is a member of `oneof`_ ``result``. - requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions): - The configuration used for this job. - """ - - class NumericalStatsResult(proto.Message): - r"""Result of the numerical stats computation. - - Attributes: - min_value (google.cloud.dlp_v2.types.Value): - Minimum value appearing in the column. - max_value (google.cloud.dlp_v2.types.Value): - Maximum value appearing in the column. - quantile_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - List of 99 values that partition the set of - field values into 100 equal sized buckets. 
- """ - - min_value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_value: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - quantile_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Value', - ) - - class CategoricalStatsResult(proto.Message): - r"""Result of the categorical stats computation. - - Attributes: - value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): - Histogram of value frequencies in the column. - """ - - class CategoricalStatsHistogramBucket(proto.Message): - r"""Histogram of value frequencies in the column. - - Attributes: - value_frequency_lower_bound (int): - Lower bound on the value frequency of the - values in this bucket. - value_frequency_upper_bound (int): - Upper bound on the value frequency of the - values in this bucket. - bucket_size (int): - Total number of values in this bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): - Sample of value frequencies in this bucket. - The total number of values returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct values in this - bucket. 
- """ - - value_frequency_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - value_frequency_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket', - ) - - class KAnonymityResult(proto.Message): - r"""Result of the k-anonymity computation. - - Attributes: - equivalence_class_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): - Histogram of k-anonymity equivalence classes. - """ - - class KAnonymityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Set of values defining the equivalence class. - One value per quasi-identifier column in the - original KAnonymity metric message. The order is - always the same as the original request. - equivalence_class_size (int): - Size of the equivalence class, for example - number of rows with the above set of values. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size: int = proto.Field( - proto.INT64, - number=2, - ) - - class KAnonymityHistogramBucket(proto.Message): - r"""Histogram of k-anonymity equivalence classes. 
- - Attributes: - equivalence_class_size_lower_bound (int): - Lower bound on the size of the equivalence - classes in this bucket. - equivalence_class_size_upper_bound (int): - Upper bound on the size of the equivalence - classes in this bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. - """ - - equivalence_class_size_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - equivalence_class_size_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - equivalence_class_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', - ) - - class LDiversityResult(proto.Message): - r"""Result of the l-diversity computation. - - Attributes: - sensitive_value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): - Histogram of l-diversity equivalence class - sensitive value frequencies. - """ - - class LDiversityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value. 
- - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Quasi-identifier values defining the - k-anonymity equivalence class. The order is - always the same as the original request. - equivalence_class_size (int): - Size of the k-anonymity equivalence class. - num_distinct_sensitive_values (int): - Number of distinct sensitive values in this - equivalence class. - top_sensitive_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): - Estimated frequencies of top sensitive - values. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size: int = proto.Field( - proto.INT64, - number=2, - ) - num_distinct_sensitive_values: int = proto.Field( - proto.INT64, - number=3, - ) - top_sensitive_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - - class LDiversityHistogramBucket(proto.Message): - r"""Histogram of l-diversity equivalence class sensitive value - frequencies. - - Attributes: - sensitive_value_frequency_lower_bound (int): - Lower bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - sensitive_value_frequency_upper_bound (int): - Upper bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. 
- """ - - sensitive_value_frequency_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - sensitive_value_frequency_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - sensitive_value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', - ) - - class KMapEstimationResult(proto.Message): - r"""Result of the reidentifiability analysis. Note that these - results are an estimation, not exact values. - - Attributes: - k_map_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): - The intervals [min_anonymity, max_anonymity] do not overlap. - If a value doesn't correspond to any such interval, the - associated frequency is zero. For example, the following - records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} - {min_anonymity: 2, max_anonymity: 3, frequency: 42} - {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean - that there are no record with an estimated anonymity of 4, - 5, or larger than 10. - """ - - class KMapEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. - estimated_anonymity (int): - The estimated anonymity for these - quasi-identifier values. 
- """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_anonymity: int = proto.Field( - proto.INT64, - number=2, - ) - - class KMapEstimationHistogramBucket(proto.Message): - r"""A KMapEstimationHistogramBucket message with the following values: - min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are - 42 records whose quasi-identifier values correspond to 3, 4 or 5 - people in the overlying population. An important particular case is - when min_anonymity = max_anonymity = 1: the frequency field then - corresponds to the number of uniquely identifiable records. - - Attributes: - min_anonymity (int): - Always positive. - max_anonymity (int): - Always greater than or equal to min_anonymity. - bucket_size (int): - Number of records within these anonymity - bounds. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_anonymity: int = proto.Field( - proto.INT64, - number=1, - ) - max_anonymity: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=5, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=7, - ) - - k_map_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket', - ) - - class DeltaPresenceEstimationResult(proto.Message): - r"""Result of the δ-presence computation. Note that these results - are an estimation, not exact values. - - Attributes: - delta_presence_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): - The intervals [min_probability, max_probability) do not - overlap. If a value doesn't correspond to any such interval, - the associated frequency is zero. For example, the following - records: {min_probability: 0, max_probability: 0.1, - frequency: 17} {min_probability: 0.2, max_probability: 0.3, - frequency: 42} {min_probability: 0.3, max_probability: 0.4, - frequency: 99} mean that there are no record with an - estimated probability in [0.1, 0.2) nor larger or equal to - 0.4. - """ - - class DeltaPresenceEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. 
- estimated_probability (float): - The estimated probability that a given individual sharing - these quasi-identifier values is in the dataset. This value, - typically called δ, is the ratio between the number of - records in the dataset with these quasi-identifier values, - and the total number of individuals (inside *and* outside - the dataset) with these quasi-identifier values. For - example, if there are 15 individuals in the dataset who - share the same quasi-identifier values, and an estimated 100 - people in the entire population with these values, then δ is - 0.15. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_probability: float = proto.Field( - proto.DOUBLE, - number=2, - ) - - class DeltaPresenceEstimationHistogramBucket(proto.Message): - r"""A DeltaPresenceEstimationHistogramBucket message with the following - values: min_probability: 0.1 max_probability: 0.2 frequency: 42 - means that there are 42 records for which δ is in [0.1, 0.2). An - important particular case is when min_probability = max_probability - = 1: then, every individual who shares this quasi-identifier - combination is in the dataset. - - Attributes: - min_probability (float): - Between 0 and 1. - max_probability (float): - Always greater than or equal to min_probability. - bucket_size (int): - Number of records within these probability - bounds. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_probability: float = proto.Field( - proto.DOUBLE, - number=1, - ) - max_probability: float = proto.Field( - proto.DOUBLE, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=5, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=7, - ) - - delta_presence_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', - ) - - class RequestedRiskAnalysisOptions(proto.Message): - r"""Risk analysis options. - - Attributes: - job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - The job config for the risk job. 
- """ - - job_config: 'RiskAnalysisJobConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='RiskAnalysisJobConfig', - ) - - requested_privacy_metric: 'PrivacyMetric' = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - requested_source_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - numerical_stats_result: NumericalStatsResult = proto.Field( - proto.MESSAGE, - number=3, - oneof='result', - message=NumericalStatsResult, - ) - categorical_stats_result: CategoricalStatsResult = proto.Field( - proto.MESSAGE, - number=4, - oneof='result', - message=CategoricalStatsResult, - ) - k_anonymity_result: KAnonymityResult = proto.Field( - proto.MESSAGE, - number=5, - oneof='result', - message=KAnonymityResult, - ) - l_diversity_result: LDiversityResult = proto.Field( - proto.MESSAGE, - number=6, - oneof='result', - message=LDiversityResult, - ) - k_map_estimation_result: KMapEstimationResult = proto.Field( - proto.MESSAGE, - number=7, - oneof='result', - message=KMapEstimationResult, - ) - delta_presence_estimation_result: DeltaPresenceEstimationResult = proto.Field( - proto.MESSAGE, - number=9, - oneof='result', - message=DeltaPresenceEstimationResult, - ) - requested_options: RequestedRiskAnalysisOptions = proto.Field( - proto.MESSAGE, - number=10, - message=RequestedRiskAnalysisOptions, - ) - - -class ValueFrequency(proto.Message): - r"""A value of a field, including its frequency. - - Attributes: - value (google.cloud.dlp_v2.types.Value): - A value contained in the field in question. - count (int): - How many times the value is contained in the - field. - """ - - value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - -class Value(proto.Message): - r"""Set of primitive values supported by the system. 
Note that for the - purposes of inspection or transformation, the number of bytes - considered to comprise a 'Value' is based on its representation as a - UTF-8 encoded string. For example, if 'integer_value' is set to - 123456789, the number of bytes would be counted as 9, even though an - int64 only holds up to 8 bytes of data. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - integer_value (int): - integer - - This field is a member of `oneof`_ ``type``. - float_value (float): - float - - This field is a member of `oneof`_ ``type``. - string_value (str): - string - - This field is a member of `oneof`_ ``type``. - boolean_value (bool): - boolean - - This field is a member of `oneof`_ ``type``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - timestamp - - This field is a member of `oneof`_ ``type``. - time_value (google.type.timeofday_pb2.TimeOfDay): - time of day - - This field is a member of `oneof`_ ``type``. - date_value (google.type.date_pb2.Date): - date - - This field is a member of `oneof`_ ``type``. - day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): - day of week - - This field is a member of `oneof`_ ``type``. 
- """ - - integer_value: int = proto.Field( - proto.INT64, - number=1, - oneof='type', - ) - float_value: float = proto.Field( - proto.DOUBLE, - number=2, - oneof='type', - ) - string_value: str = proto.Field( - proto.STRING, - number=3, - oneof='type', - ) - boolean_value: bool = proto.Field( - proto.BOOL, - number=4, - oneof='type', - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=timestamp_pb2.Timestamp, - ) - time_value: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=timeofday_pb2.TimeOfDay, - ) - date_value: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=7, - oneof='type', - message=date_pb2.Date, - ) - day_of_week_value: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=8, - oneof='type', - enum=dayofweek_pb2.DayOfWeek, - ) - - -class QuoteInfo(proto.Message): - r"""Message for infoType-dependent details parsed from quote. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - date_time (google.cloud.dlp_v2.types.DateTime): - The date time indicated by the quote. - - This field is a member of `oneof`_ ``parsed_quote``. - """ - - date_time: 'DateTime' = proto.Field( - proto.MESSAGE, - number=2, - oneof='parsed_quote', - message='DateTime', - ) - - -class DateTime(proto.Message): - r"""Message for a date time object. - e.g. 2018-01-01, 5th August. - - Attributes: - date (google.type.date_pb2.Date): - One or more of the following must be set. - Must be a valid date or time value. - day_of_week (google.type.dayofweek_pb2.DayOfWeek): - Day of week - time (google.type.timeofday_pb2.TimeOfDay): - Time of day - time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): - Time zone - """ - - class TimeZone(proto.Message): - r"""Time zone of the date time object. - - Attributes: - offset_minutes (int): - Set only if the offset can be determined. 
- Positive for time ahead of UTC. E.g. For - "UTC-9", this value is -540. - """ - - offset_minutes: int = proto.Field( - proto.INT32, - number=1, - ) - - date: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=1, - message=date_pb2.Date, - ) - day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=2, - enum=dayofweek_pb2.DayOfWeek, - ) - time: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=3, - message=timeofday_pb2.TimeOfDay, - ) - time_zone: TimeZone = proto.Field( - proto.MESSAGE, - number=4, - message=TimeZone, - ) - - -class DeidentifyConfig(proto.Message): - r"""The configuration that controls how the data will change. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the dataset as free-form text and apply - the same free text transformation everywhere. - - This field is a member of `oneof`_ ``transformation``. - record_transformations (google.cloud.dlp_v2.types.RecordTransformations): - Treat the dataset as structured. - Transformations can be applied to specific - locations within structured datasets, such as - transforming a column within a table. - - This field is a member of `oneof`_ ``transformation``. - image_transformations (google.cloud.dlp_v2.types.ImageTransformations): - Treat the dataset as an image and redact. - - This field is a member of `oneof`_ ``transformation``. - transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): - Mode for handling transformation errors. If left - unspecified, the default mode is - ``TransformationErrorHandling.ThrowError``. 
- """ - - info_type_transformations: 'InfoTypeTransformations' = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='InfoTypeTransformations', - ) - record_transformations: 'RecordTransformations' = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RecordTransformations', - ) - image_transformations: 'ImageTransformations' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='ImageTransformations', - ) - transformation_error_handling: 'TransformationErrorHandling' = proto.Field( - proto.MESSAGE, - number=3, - message='TransformationErrorHandling', - ) - - -class ImageTransformations(proto.Message): - r"""A type of transformation that is applied over images. - - Attributes: - transforms (MutableSequence[google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation]): - - """ - - class ImageTransformation(proto.Message): - r"""Configuration for determining how redaction of images should - occur. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - selected_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.SelectedInfoTypes): - Apply transformation to the selected info_types. - - This field is a member of `oneof`_ ``target``. - all_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllInfoTypes): - Apply transformation to all findings not specified in other - ImageTransformation's selected_info_types. Only one instance - is allowed within the ImageTransformations message. - - This field is a member of `oneof`_ ``target``. 
- all_text (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllText): - Apply transformation to all text that doesn't - match an infoType. Only one instance is allowed - within the ImageTransformations message. - - This field is a member of `oneof`_ ``target``. - redaction_color (google.cloud.dlp_v2.types.Color): - The color to use when redacting content from - an image. If not specified, the default is - black. - """ - - class SelectedInfoTypes(proto.Message): - r"""Apply transformation to the selected info_types. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - Required. InfoTypes to apply the - transformation to. Required. Provided InfoType - must be unique within the ImageTransformations - message. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=storage.InfoType, - ) - - class AllInfoTypes(proto.Message): - r"""Apply transformation to all findings. - """ - - class AllText(proto.Message): - r"""Apply to all text. 
- """ - - selected_info_types: 'ImageTransformations.ImageTransformation.SelectedInfoTypes' = proto.Field( - proto.MESSAGE, - number=4, - oneof='target', - message='ImageTransformations.ImageTransformation.SelectedInfoTypes', - ) - all_info_types: 'ImageTransformations.ImageTransformation.AllInfoTypes' = proto.Field( - proto.MESSAGE, - number=5, - oneof='target', - message='ImageTransformations.ImageTransformation.AllInfoTypes', - ) - all_text: 'ImageTransformations.ImageTransformation.AllText' = proto.Field( - proto.MESSAGE, - number=6, - oneof='target', - message='ImageTransformations.ImageTransformation.AllText', - ) - redaction_color: 'Color' = proto.Field( - proto.MESSAGE, - number=3, - message='Color', - ) - - transforms: MutableSequence[ImageTransformation] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=ImageTransformation, - ) - - -class TransformationErrorHandling(proto.Message): - r"""How to handle transformation errors during de-identification. A - transformation error occurs when the requested transformation is - incompatible with the data. For example, trying to de-identify an IP - address using a ``DateShift`` transformation would result in a - transformation error, since date info cannot be extracted from an IP - address. Information about any incompatible transformations, and how - they were handled, is returned in the response as part of the - ``TransformationOverviews``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): - Throw an error - - This field is a member of `oneof`_ ``mode``. 
- leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed): - Ignore errors - - This field is a member of `oneof`_ ``mode``. - """ - - class ThrowError(proto.Message): - r"""Throw an error and fail the request when a transformation - error occurs. - - """ - - class LeaveUntransformed(proto.Message): - r"""Skips the data without modifying it if the requested transformation - would cause an error. For example, if a ``DateShift`` transformation - were applied an an IP address, this mode would leave the IP address - unchanged in the response. - - """ - - throw_error: ThrowError = proto.Field( - proto.MESSAGE, - number=1, - oneof='mode', - message=ThrowError, - ) - leave_untransformed: LeaveUntransformed = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=LeaveUntransformed, - ) - - -class PrimitiveTransformation(proto.Message): - r"""A rule for transforming a value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig): - Replace with a specified value. - - This field is a member of `oneof`_ ``transformation``. - redact_config (google.cloud.dlp_v2.types.RedactConfig): - Redact - - This field is a member of `oneof`_ ``transformation``. - character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig): - Mask - - This field is a member of `oneof`_ ``transformation``. - crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig): - Ffx-Fpe - - This field is a member of `oneof`_ ``transformation``. 
- fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): - Fixed size bucketing - - This field is a member of `oneof`_ ``transformation``. - bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): - Bucketing - - This field is a member of `oneof`_ ``transformation``. - replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): - Replace with infotype - - This field is a member of `oneof`_ ``transformation``. - time_part_config (google.cloud.dlp_v2.types.TimePartConfig): - Time extraction - - This field is a member of `oneof`_ ``transformation``. - crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): - Crypto - - This field is a member of `oneof`_ ``transformation``. - date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): - Date Shift - - This field is a member of `oneof`_ ``transformation``. - crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): - Deterministic Crypto - - This field is a member of `oneof`_ ``transformation``. - replace_dictionary_config (google.cloud.dlp_v2.types.ReplaceDictionaryConfig): - Replace with a value randomly drawn (with - replacement) from a dictionary. - - This field is a member of `oneof`_ ``transformation``. 
- """ - - replace_config: 'ReplaceValueConfig' = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='ReplaceValueConfig', - ) - redact_config: 'RedactConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RedactConfig', - ) - character_mask_config: 'CharacterMaskConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='transformation', - message='CharacterMaskConfig', - ) - crypto_replace_ffx_fpe_config: 'CryptoReplaceFfxFpeConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='CryptoReplaceFfxFpeConfig', - ) - fixed_size_bucketing_config: 'FixedSizeBucketingConfig' = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='FixedSizeBucketingConfig', - ) - bucketing_config: 'BucketingConfig' = proto.Field( - proto.MESSAGE, - number=6, - oneof='transformation', - message='BucketingConfig', - ) - replace_with_info_type_config: 'ReplaceWithInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=7, - oneof='transformation', - message='ReplaceWithInfoTypeConfig', - ) - time_part_config: 'TimePartConfig' = proto.Field( - proto.MESSAGE, - number=8, - oneof='transformation', - message='TimePartConfig', - ) - crypto_hash_config: 'CryptoHashConfig' = proto.Field( - proto.MESSAGE, - number=9, - oneof='transformation', - message='CryptoHashConfig', - ) - date_shift_config: 'DateShiftConfig' = proto.Field( - proto.MESSAGE, - number=11, - oneof='transformation', - message='DateShiftConfig', - ) - crypto_deterministic_config: 'CryptoDeterministicConfig' = proto.Field( - proto.MESSAGE, - number=12, - oneof='transformation', - message='CryptoDeterministicConfig', - ) - replace_dictionary_config: 'ReplaceDictionaryConfig' = proto.Field( - proto.MESSAGE, - number=13, - oneof='transformation', - message='ReplaceDictionaryConfig', - ) - - -class TimePartConfig(proto.Message): - r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or - preserve a 
portion of the value. - - Attributes: - part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): - The part of the time to keep. - """ - class TimePart(proto.Enum): - r"""Components that make up time. - - Values: - TIME_PART_UNSPECIFIED (0): - Unused - YEAR (1): - [0-9999] - MONTH (2): - [1-12] - DAY_OF_MONTH (3): - [1-31] - DAY_OF_WEEK (4): - [1-7] - WEEK_OF_YEAR (5): - [1-53] - HOUR_OF_DAY (6): - [0-23] - """ - TIME_PART_UNSPECIFIED = 0 - YEAR = 1 - MONTH = 2 - DAY_OF_MONTH = 3 - DAY_OF_WEEK = 4 - WEEK_OF_YEAR = 5 - HOUR_OF_DAY = 6 - - part_to_extract: TimePart = proto.Field( - proto.ENUM, - number=1, - enum=TimePart, - ) - - -class CryptoHashConfig(proto.Message): - r"""Pseudonymization method that generates surrogates via - cryptographic hashing. Uses SHA-256. - The key size must be either 32 or 64 bytes. - Outputs a base64 encoded representation of the hashed output - (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). - Currently, only string and integer values can be hashed. See - https://cloud.google.com/dlp/docs/pseudonymization to learn - more. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the hash function. - """ - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - - -class CryptoDeterministicConfig(proto.Message): - r"""Pseudonymization method that generates deterministic - encryption for the given input. Outputs a base64 encoded - representation of the encrypted output. Uses AES-SIV based on - the RFC https://tools.ietf.org/html/rfc5297. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the encryption function. For - deterministic encryption using AES-SIV, the - provided key is internally expanded to 64 bytes - prior to use. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom info type to annotate the surrogate with. 
This - annotation will be applied to the surrogate by prefixing it - with the name of the custom info type followed by the number - of characters comprising the surrogate. The following scheme - defines the format: {info type name}({surrogate character - count}):{surrogate} - - For example, if the name of custom info type is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom info type 'Surrogate'. This - facilitates reversal of the surrogate when it occurs in free - text. - - Note: For record transformations where the entire cell in a - table is being transformed, surrogates are not mandatory. - Surrogates are used to denote the location of the token and - are necessary for re-identification in free form text. - - In order for inspection to work properly, the name of this - info type must not occur naturally anywhere in your data; - otherwise, inspection may either - - - reverse a surrogate that does not correspond to an actual - identifier - - be unable to parse the surrogate and result in an error - - Therefore, choose your custom info type name carefully after - considering what your data looks like. One way to select a - name that has a high chance of yielding reliable detection - is to include one or more unicode characters that are highly - improbable to exist in your data. For example, assuming your - data is entered from a regular ASCII keyboard, the symbol - with the hex code point 29DD might be used like so: - ⧝MY_TOKEN_TYPE. - context (google.cloud.dlp_v2.types.FieldId): - A context may be used for higher security and maintaining - referential integrity such that the same identifier in two - different contexts will be given a distinct surrogate. The - context is appended to plaintext value being encrypted. On - decryption the provided context is validated against the - value used during encryption. 
If a context was provided - during encryption, same context must be provided during - decryption as well. - - If the context is not set, plaintext would be used as is for - encryption. If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - plaintext would be used as is for encryption. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - unstructured ``ContentItem``\ s. - """ - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - surrogate_info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - - -class ReplaceValueConfig(proto.Message): - r"""Replace each input value with a given ``Value``. - - Attributes: - new_value (google.cloud.dlp_v2.types.Value): - Value to replace it with. - """ - - new_value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - - -class ReplaceDictionaryConfig(proto.Message): - r"""Replace each input value with a value randomly selected from - the dictionary. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): - A list of words to select from for random replacement. The - `limits `__ page - contains details about the size limits of dictionaries. - - This field is a member of `oneof`_ ``type``. - """ - - word_list: storage.CustomInfoType.Dictionary.WordList = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.Dictionary.WordList, - ) - - -class ReplaceWithInfoTypeConfig(proto.Message): - r"""Replace each matching finding with the name of the info_type. 
- """ - - -class RedactConfig(proto.Message): - r"""Redact a given value. For example, if used with an - ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My - phone number is 206-555-0123', the output would be 'My phone number - is '. - - """ - - -class CharsToIgnore(proto.Message): - r"""Characters to skip when doing deidentification of a value. - These will be left alone and skipped. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - characters_to_skip (str): - Characters to not transform when masking. - - This field is a member of `oneof`_ ``characters``. - common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): - Common characters to not transform when - masking. Useful to avoid removing punctuation. - - This field is a member of `oneof`_ ``characters``. - """ - class CommonCharsToIgnore(proto.Enum): - r"""Convenience enum for indicating common characters to not - transform. - - Values: - COMMON_CHARS_TO_IGNORE_UNSPECIFIED (0): - Unused. 
- NUMERIC (1): - 0-9 - ALPHA_UPPER_CASE (2): - A-Z - ALPHA_LOWER_CASE (3): - a-z - PUNCTUATION (4): - US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ - WHITESPACE (5): - Whitespace character, one of [ \\t\n\x0B\f\r] - """ - COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 - NUMERIC = 1 - ALPHA_UPPER_CASE = 2 - ALPHA_LOWER_CASE = 3 - PUNCTUATION = 4 - WHITESPACE = 5 - - characters_to_skip: str = proto.Field( - proto.STRING, - number=1, - oneof='characters', - ) - common_characters_to_ignore: CommonCharsToIgnore = proto.Field( - proto.ENUM, - number=2, - oneof='characters', - enum=CommonCharsToIgnore, - ) - - -class CharacterMaskConfig(proto.Message): - r"""Partially mask a string by replacing a given number of characters - with a fixed character. Masking can start from the beginning or end - of the string. This can be used on data of any type (numbers, longs, - and so on) and when de-identifying structured data we'll attempt to - preserve the original data's type. (This allows you to take a long - like 123 and modify it to a string like \**3. - - Attributes: - masking_character (str): - Character to use to mask the sensitive values—for example, - ``*`` for an alphabetic string such as a name, or ``0`` for - a numeric string such as ZIP code or credit card number. - This string must have a length of 1. If not supplied, this - value defaults to ``*`` for strings, and ``0`` for digits. - number_to_mask (int): - Number of characters to mask. If not set, all matching chars - will be masked. Skipped characters do not count towards this - tally. - - If ``number_to_mask`` is negative, this denotes inverse - masking. Cloud DLP masks all but a number of characters. For - example, suppose you have the following values: - - - ``masking_character`` is ``*`` - - ``number_to_mask`` is ``-4`` - - ``reverse_order`` is ``false`` - - ``CharsToIgnore`` includes ``-`` - - Input string is ``1234-5678-9012-3456`` - - The resulting de-identified string is - ``****-****-****-3456``. 
Cloud DLP masks all but the last - four characters. If ``reverse_order`` is ``true``, all but - the first four characters are masked as - ``1234-****-****-****``. - reverse_order (bool): - Mask characters in reverse order. For example, if - ``masking_character`` is ``0``, ``number_to_mask`` is - ``14``, and ``reverse_order`` is ``false``, then the input - string ``1234-5678-9012-3456`` is masked as - ``00000000000000-3456``. If ``masking_character`` is ``*``, - ``number_to_mask`` is ``3``, and ``reverse_order`` is - ``true``, then the string ``12345`` is masked as ``12***``. - characters_to_ignore (MutableSequence[google.cloud.dlp_v2.types.CharsToIgnore]): - When masking a string, items in this list will be skipped - when replacing characters. For example, if the input string - is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` - and mask 5 characters with ``*``, Cloud DLP returns - ``***-**5-5555``. - """ - - masking_character: str = proto.Field( - proto.STRING, - number=1, - ) - number_to_mask: int = proto.Field( - proto.INT32, - number=2, - ) - reverse_order: bool = proto.Field( - proto.BOOL, - number=3, - ) - characters_to_ignore: MutableSequence['CharsToIgnore'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='CharsToIgnore', - ) - - -class FixedSizeBucketingConfig(proto.Message): - r"""Buckets values based on fixed size ranges. The Bucketing - transformation can provide all of this functionality, but requires - more configuration. This message is provided as a convenience to the - user for simple bucketing strategies. - - The transformed value will be a hyphenated string of - {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and - upper_bound = 20, all values that are within this bucket will be - replaced with "10-20". - - This can be used on data of type: double, long. 
- - If the bound Value type differs from the type of data being - transformed, we will first attempt converting the type of the data - to be transformed to match the type of the bound before comparing. - - See https://cloud.google.com/dlp/docs/concepts-bucketing to learn - more. - - Attributes: - lower_bound (google.cloud.dlp_v2.types.Value): - Required. Lower bound value of buckets. All values less than - ``lower_bound`` are grouped together into a single bucket; - for example if ``lower_bound`` = 10, then all values less - than 10 are replaced with the value "-10". - upper_bound (google.cloud.dlp_v2.types.Value): - Required. Upper bound value of buckets. All values greater - than upper_bound are grouped together into a single bucket; - for example if ``upper_bound`` = 89, then all values greater - than 89 are replaced with the value "89+". - bucket_size (float): - Required. Size of each bucket (except for minimum and - maximum buckets). So if ``lower_bound`` = 10, - ``upper_bound`` = 89, and ``bucket_size`` = 10, then the - following buckets would be used: -10, 10-20, 20-30, 30-40, - 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 - decimals works. - """ - - lower_bound: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - upper_bound: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - bucket_size: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - -class BucketingConfig(proto.Message): - r"""Generalization function that buckets values based on ranges. The - ranges and replacement values are dynamically provided by the user - for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> - HIGH This can be used on data of type: number, long, string, - timestamp. If the bound ``Value`` type differs from the type of data - being transformed, we will first attempt converting the type of the - data to be transformed to match the type of the bound before - comparing. 
See https://cloud.google.com/dlp/docs/concepts-bucketing - to learn more. - - Attributes: - buckets (MutableSequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): - Set of buckets. Ranges must be - non-overlapping. - """ - - class Bucket(proto.Message): - r"""Bucket is represented as a range, along with replacement - values. - - Attributes: - min_ (google.cloud.dlp_v2.types.Value): - Lower bound of the range, inclusive. Type - should be the same as max if used. - max_ (google.cloud.dlp_v2.types.Value): - Upper bound of the range, exclusive; type - must match min. - replacement_value (google.cloud.dlp_v2.types.Value): - Required. Replacement value for this bucket. - """ - - min_: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - replacement_value: 'Value' = proto.Field( - proto.MESSAGE, - number=3, - message='Value', - ) - - buckets: MutableSequence[Bucket] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Bucket, - ) - - -class CryptoReplaceFfxFpeConfig(proto.Message): - r"""Replaces an identifier with a surrogate using Format Preserving - Encryption (FPE) with the FFX mode of operation; however when used - in the ``ReidentifyContent`` API method, it serves the opposite - function by reversing the surrogate back into the original - identifier. The identifier must be encoded as ASCII. For a given - crypto key and context, the same identifier will be replaced with - the same surrogate. Identifiers must be at least two characters - long. In the case that the identifier is the empty string, it will - be skipped. See https://cloud.google.com/dlp/docs/pseudonymization - to learn more. - - Note: We recommend using CryptoDeterministicConfig for all use cases - which do not require preserving the input alphabet space and size, - plus warrant referential integrity. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Required. The key used by the encryption - algorithm. - context (google.cloud.dlp_v2.types.FieldId): - The 'tweak', a context may be used for higher security since - the same identifier in two different contexts won't be given - the same surrogate. If the context is not set, a default - tweak will be used. - - If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - a default tweak will be used. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - unstructured ``ContentItem``\ s. Currently, the referenced - field may be of value type integer or string. - - The tweak is constructed as a sequence of bytes in big - endian byte order such that: - - - a 64 bit integer is encoded followed by a single byte of - value 1 - - a string is encoded in UTF-8 format followed by a single - byte of value 2 - common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): - Common alphabets. - - This field is a member of `oneof`_ ``alphabet``. - custom_alphabet (str): - This is supported by mapping these to the alphanumeric - characters that the FFX mode natively supports. This happens - before/after encryption/decryption. Each character listed - must appear only once. Number of characters must be in the - range [2, 95]. This must be encoded as ASCII. The order of - characters does not matter. 
The full list of allowed - characters is: - 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz - ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ - - This field is a member of `oneof`_ ``alphabet``. - radix (int): - The native way to select the alphabet. Must be in the range - [2, 95]. - - This field is a member of `oneof`_ ``alphabet``. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom infoType to annotate the surrogate with. This - annotation will be applied to the surrogate by prefixing it - with the name of the custom infoType followed by the number - of characters comprising the surrogate. The following scheme - defines the format: - info_type_name(surrogate_character_count):surrogate - - For example, if the name of custom infoType is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom infoType - ```SurrogateType`` `__. - This facilitates reversal of the surrogate when it occurs in - free text. - - In order for inspection to work properly, the name of this - infoType must not occur naturally anywhere in your data; - otherwise, inspection may find a surrogate that does not - correspond to an actual identifier. Therefore, choose your - custom infoType name carefully after considering what your - data looks like. One way to select a name that has a high - chance of yielding reliable detection is to include one or - more unicode characters that are highly improbable to exist - in your data. For example, assuming your data is entered - from a regular ASCII keyboard, the symbol with the hex code - point 29DD might be used like so: ⧝MY_TOKEN_TYPE - """ - class FfxCommonNativeAlphabet(proto.Enum): - r"""These are commonly used subsets of the alphabet that the FFX - mode natively supports. In the algorithm, the alphabet is - selected using the "radix". 
Therefore each corresponds to a - particular radix. - - Values: - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (0): - Unused. - NUMERIC (1): - ``[0-9]`` (radix of 10) - HEXADECIMAL (2): - ``[0-9A-F]`` (radix of 16) - UPPER_CASE_ALPHA_NUMERIC (3): - ``[0-9A-Z]`` (radix of 36) - ALPHA_NUMERIC (4): - ``[0-9A-Za-z]`` (radix of 62) - """ - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 - NUMERIC = 1 - HEXADECIMAL = 2 - UPPER_CASE_ALPHA_NUMERIC = 3 - ALPHA_NUMERIC = 4 - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - common_alphabet: FfxCommonNativeAlphabet = proto.Field( - proto.ENUM, - number=4, - oneof='alphabet', - enum=FfxCommonNativeAlphabet, - ) - custom_alphabet: str = proto.Field( - proto.STRING, - number=5, - oneof='alphabet', - ) - radix: int = proto.Field( - proto.INT32, - number=6, - oneof='alphabet', - ) - surrogate_info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=8, - message=storage.InfoType, - ) - - -class CryptoKey(proto.Message): - r"""This is a data encryption key (DEK) (as opposed to - a key encryption key (KEK) stored by Cloud Key Management - Service (Cloud KMS). - When using Cloud KMS to wrap or unwrap a DEK, be sure to set an - appropriate IAM policy on the KEK to ensure an attacker cannot - unwrap the DEK. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transient (google.cloud.dlp_v2.types.TransientCryptoKey): - Transient crypto key - - This field is a member of `oneof`_ ``source``. 
- unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): - Unwrapped crypto key - - This field is a member of `oneof`_ ``source``. - kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): - Key wrapped using Cloud KMS - - This field is a member of `oneof`_ ``source``. - """ - - transient: 'TransientCryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='TransientCryptoKey', - ) - unwrapped: 'UnwrappedCryptoKey' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='UnwrappedCryptoKey', - ) - kms_wrapped: 'KmsWrappedCryptoKey' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='KmsWrappedCryptoKey', - ) - - -class TransientCryptoKey(proto.Message): - r"""Use this to have a random data crypto key generated. - It will be discarded after the request finishes. - - Attributes: - name (str): - Required. Name of the key. This is an arbitrary string used - to differentiate different keys. A unique key is generated - per name: two separate ``TransientCryptoKey`` protos share - the same generated key if their names are the same. When the - data crypto key is generated, this name is not used in any - way (repeating the api call will result in a different key - being generated). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UnwrappedCryptoKey(proto.Message): - r"""Using raw keys is prone to security risks due to accidentally - leaking the key. Choose another type of key if possible. - - Attributes: - key (bytes): - Required. A 128/192/256 bit key. - """ - - key: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class KmsWrappedCryptoKey(proto.Message): - r"""Include to use an existing data crypto key wrapped by KMS. The - wrapped key must be a 128-, 192-, or 256-bit key. 
Authorization - requires the following IAM permissions when sending a request to - perform a crypto transformation using a KMS-wrapped crypto key: - dlp.kms.encrypt - - For more information, see [Creating a wrapped key] - (https://cloud.google.com/dlp/docs/create-wrapped-key). - - Note: When you use Cloud KMS for cryptographic operations, `charges - apply `__. - - Attributes: - wrapped_key (bytes): - Required. The wrapped data crypto key. - crypto_key_name (str): - Required. The resource name of the KMS - CryptoKey to use for unwrapping. - """ - - wrapped_key: bytes = proto.Field( - proto.BYTES, - number=1, - ) - crypto_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DateShiftConfig(proto.Message): - r"""Shifts dates by random number of days, with option to be - consistent for the same context. See - https://cloud.google.com/dlp/docs/concepts-date-shifting to - learn more. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - upper_bound_days (int): - Required. Range of shift in days. Actual - shift will be selected at random within this - range (inclusive ends). Negative means shift to - earlier in time. Must not be more than 365250 - days (1000 years) each direction. - For example, 3 means shift date to at most 3 - days into the future. - lower_bound_days (int): - Required. For example, -5 means shift date to - at most 5 days back in the past. - context (google.cloud.dlp_v2.types.FieldId): - Points to the field that contains the - context, for example, an entity id. If set, must - also set cryptoKey. If set, shift will be - consistent for the given context. - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Causes the shift to be computed based on this key and the - context. This results in the same shift for the same context - and crypto_key. If set, must also set context. Can only be - applied to table items. - - This field is a member of `oneof`_ ``method``. 
- """ - - upper_bound_days: int = proto.Field( - proto.INT32, - number=1, - ) - lower_bound_days: int = proto.Field( - proto.INT32, - number=2, - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=4, - oneof='method', - message='CryptoKey', - ) - - -class InfoTypeTransformations(proto.Message): - r"""A type of transformation that will scan unstructured text and apply - various ``PrimitiveTransformation``\ s to each finding, where the - transformation is applied to only values that were identified as a - specific info_type. - - Attributes: - transformations (MutableSequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): - Required. Transformation for each infoType. - Cannot specify more than one for a given - infoType. - """ - - class InfoTypeTransformation(proto.Message): - r"""A transformation to apply to text that is identified as a specific - info_type. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - InfoTypes to apply the transformation to. An empty list will - cause this transformation to apply to all findings that - correspond to infoTypes that were requested in - ``InspectConfig``. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Required. Primitive transformation to apply - to the infoType. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - primitive_transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=2, - message='PrimitiveTransformation', - ) - - transformations: MutableSequence[InfoTypeTransformation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=InfoTypeTransformation, - ) - - -class FieldTransformation(proto.Message): - r"""The transformation to apply to the field. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Required. Input field(s) to apply the transformation to. - When you have columns that reference their position within a - list, omit the index from the FieldId. FieldId name matching - ignores the index. For example, instead of - "contact.nums[0].type", use "contact.nums.type". - condition (google.cloud.dlp_v2.types.RecordCondition): - Only apply the transformation if the condition evaluates to - true for the given ``RecordCondition``. The conditions are - allowed to reference fields that are not used in the actual - transformation. - - Example Use Cases: - - - Apply a different bucket transformation to an age column - if the zip code column for the same record is within a - specific range. - - Redact a field if the date of birth field is greater than - 85. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Apply the transformation to the entire field. - - This field is a member of `oneof`_ ``transformation``. - info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the contents of the field as free text, and - selectively transform content that matches an ``InfoType``. - - This field is a member of `oneof`_ ``transformation``. 
- """ - - fields: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - condition: 'RecordCondition' = proto.Field( - proto.MESSAGE, - number=3, - message='RecordCondition', - ) - primitive_transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='PrimitiveTransformation', - ) - info_type_transformations: 'InfoTypeTransformations' = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='InfoTypeTransformations', - ) - - -class RecordTransformations(proto.Message): - r"""A type of transformation that is applied over structured data - such as a table. - - Attributes: - field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): - Transform the record by applying various - field transformations. - record_suppressions (MutableSequence[google.cloud.dlp_v2.types.RecordSuppression]): - Configuration defining which records get - suppressed entirely. Records that match any - suppression rule are omitted from the output. - """ - - field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldTransformation', - ) - record_suppressions: MutableSequence['RecordSuppression'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='RecordSuppression', - ) - - -class RecordSuppression(proto.Message): - r"""Configuration to suppress records whose suppression - conditions evaluate to true. - - Attributes: - condition (google.cloud.dlp_v2.types.RecordCondition): - A condition that when it evaluates to true - will result in the record being evaluated to be - suppressed from the transformed content. 
- """ - - condition: 'RecordCondition' = proto.Field( - proto.MESSAGE, - number=1, - message='RecordCondition', - ) - - -class RecordCondition(proto.Message): - r"""A condition for determining whether a transformation should - be applied to a field. - - Attributes: - expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): - An expression. - """ - - class Condition(proto.Message): - r"""The field type of ``value`` and ``field`` do not need to match to be - considered equal, but not all comparisons are possible. EQUAL_TO and - NOT_EQUAL_TO attempt to compare even with incompatible types, but - all other comparisons are invalid with incompatible types. A - ``value`` of type: - - - ``string`` can be compared against all other types - - ``boolean`` can only be compared against other booleans - - ``integer`` can be compared against doubles or a string if the - string value can be parsed as an integer. - - ``double`` can be compared against integers or a string if the - string can be parsed as a double. - - ``Timestamp`` can be compared against strings in RFC 3339 date - string format. - - ``TimeOfDay`` can be compared against timestamps and strings in - the format of 'HH:mm:ss'. - - If we fail to compare do to type mismatch, a warning will be given - and the condition will evaluate to false. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Field within the record this - condition is evaluated against. - operator (google.cloud.dlp_v2.types.RelationalOperator): - Required. Operator used to compare the field - or infoType to the value. - value (google.cloud.dlp_v2.types.Value): - Value to compare against. [Mandatory, except for ``EXISTS`` - tests.] 
- """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - operator: 'RelationalOperator' = proto.Field( - proto.ENUM, - number=3, - enum='RelationalOperator', - ) - value: 'Value' = proto.Field( - proto.MESSAGE, - number=4, - message='Value', - ) - - class Conditions(proto.Message): - r"""A collection of conditions. - - Attributes: - conditions (MutableSequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): - A collection of conditions. - """ - - conditions: MutableSequence['RecordCondition.Condition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='RecordCondition.Condition', - ) - - class Expressions(proto.Message): - r"""An expression, consisting of an operator and conditions. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): - The operator to apply to the result of conditions. Default - and currently only supported value is ``AND``. - conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): - Conditions to apply to the expression. - - This field is a member of `oneof`_ ``type``. - """ - class LogicalOperator(proto.Enum): - r"""Logical operators for conditional checks. - - Values: - LOGICAL_OPERATOR_UNSPECIFIED (0): - Unused - AND (1): - Conditional AND - """ - LOGICAL_OPERATOR_UNSPECIFIED = 0 - AND = 1 - - logical_operator: 'RecordCondition.Expressions.LogicalOperator' = proto.Field( - proto.ENUM, - number=1, - enum='RecordCondition.Expressions.LogicalOperator', - ) - conditions: 'RecordCondition.Conditions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='RecordCondition.Conditions', - ) - - expressions: Expressions = proto.Field( - proto.MESSAGE, - number=3, - message=Expressions, - ) - - -class TransformationOverview(proto.Message): - r"""Overview of the modifications that occurred. 
- - Attributes: - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - transformation_summaries (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary]): - Transformations applied to the dataset. - """ - - transformed_bytes: int = proto.Field( - proto.INT64, - number=2, - ) - transformation_summaries: MutableSequence['TransformationSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TransformationSummary', - ) - - -class TransformationSummary(proto.Message): - r"""Summary of a single transformation. Only one of 'transformation', - 'field_transformation', or 'record_suppress' will be set. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Set if the transformation was limited to a - specific InfoType. - field (google.cloud.dlp_v2.types.FieldId): - Set if the transformation was limited to a - specific FieldId. - transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - The specific transformation these stats apply - to. - field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): - The field transformation that was applied. - If multiple field transformations are requested - for a single field, this list will contain all - of them; otherwise, only one is supplied. - record_suppress (google.cloud.dlp_v2.types.RecordSuppression): - The specific suppression option these stats - apply to. - results (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): - Collection of all transformations that took - place or had an error. - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - """ - class TransformationResultCode(proto.Enum): - r"""Possible outcomes of transformations. - - Values: - TRANSFORMATION_RESULT_CODE_UNSPECIFIED (0): - Unused - SUCCESS (1): - Transformation completed without an error. - ERROR (2): - Transformation had an error. 
- """ - TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 - SUCCESS = 1 - ERROR = 2 - - class SummaryResult(proto.Message): - r"""A collection that informs the user the number of times a particular - ``TransformationResultCode`` and error details occurred. - - Attributes: - count (int): - Number of transformations counted by this - result. - code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): - Outcome of the transformation. - details (str): - A place for warnings or errors to show up if - a transformation didn't work as expected. - """ - - count: int = proto.Field( - proto.INT64, - number=1, - ) - code: 'TransformationSummary.TransformationResultCode' = proto.Field( - proto.ENUM, - number=2, - enum='TransformationSummary.TransformationResultCode', - ) - details: str = proto.Field( - proto.STRING, - number=3, - ) - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=3, - message='PrimitiveTransformation', - ) - field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldTransformation', - ) - record_suppress: 'RecordSuppression' = proto.Field( - proto.MESSAGE, - number=6, - message='RecordSuppression', - ) - results: MutableSequence[SummaryResult] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=SummaryResult, - ) - transformed_bytes: int = proto.Field( - proto.INT64, - number=7, - ) - - -class TransformationDescription(proto.Message): - r"""A flattened description of a ``PrimitiveTransformation`` or - ``RecordSuppression``. - - Attributes: - type_ (google.cloud.dlp_v2.types.TransformationType): - The transformation type. - description (str): - A description of the transformation. 
This is empty for a - RECORD_SUPPRESSION, or is the output of calling toString() - on the ``PrimitiveTransformation`` protocol buffer message - for any other type of transformation. - condition (str): - A human-readable string representation of the - ``RecordCondition`` corresponding to this transformation. - Set if a ``RecordCondition`` was used to determine whether - or not to apply this transformation. - - Examples: \* (age_field > 85) \* (age_field <= 18) \* - (zip_field exists) \* (zip_field == 01234) && (city_field != - "Springville") \* (zip_field == 01234) && (age_field <= 18) - && (city_field exists) - info_type (google.cloud.dlp_v2.types.InfoType): - Set if the transformation was limited to a specific - ``InfoType``. - """ - - type_: 'TransformationType' = proto.Field( - proto.ENUM, - number=1, - enum='TransformationType', - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - condition: str = proto.Field( - proto.STRING, - number=3, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=4, - message=storage.InfoType, - ) - - -class TransformationDetails(proto.Message): - r"""Details about a single transformation. This object contains a - description of the transformation, information about whether the - transformation was successfully applied, and the precise - location where the transformation occurred. These details are - stored in a user-specified BigQuery table. - - Attributes: - resource_name (str): - The name of the job that completed the - transformation. - container_name (str): - The top level name of the container where the - transformation is located (this will be the - source file name or table name). - transformation (MutableSequence[google.cloud.dlp_v2.types.TransformationDescription]): - Description of transformation. This would only contain more - than one element if there were multiple matching - transformations and which one to apply was ambiguous. 
Not - set for states that contain no transformation, currently - only state that contains no transformation is - TransformationResultStateType.METADATA_UNRETRIEVABLE. - status_details (google.cloud.dlp_v2.types.TransformationResultStatus): - Status of the transformation, if - transformation was not successful, this will - specify what caused it to fail, otherwise it - will show that the transformation was - successful. - transformed_bytes (int): - The number of bytes that were transformed. If - transformation was unsuccessful or did not take - place because there was no content to transform, - this will be zero. - transformation_location (google.cloud.dlp_v2.types.TransformationLocation): - The precise location of the transformed - content in the original container. - """ - - resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - container_name: str = proto.Field( - proto.STRING, - number=2, - ) - transformation: MutableSequence['TransformationDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TransformationDescription', - ) - status_details: 'TransformationResultStatus' = proto.Field( - proto.MESSAGE, - number=4, - message='TransformationResultStatus', - ) - transformed_bytes: int = proto.Field( - proto.INT64, - number=5, - ) - transformation_location: 'TransformationLocation' = proto.Field( - proto.MESSAGE, - number=6, - message='TransformationLocation', - ) - - -class TransformationLocation(proto.Message): - r"""Specifies the location of a transformation. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - finding_id (str): - For infotype transformations, link to the - corresponding findings ID so that location - information does not need to be duplicated. Each - findings ID correlates to an entry in the - findings output table, this table only gets - created when users specify to save findings (add - the save findings action to the request). - - This field is a member of `oneof`_ ``location_type``. - record_transformation (google.cloud.dlp_v2.types.RecordTransformation): - For record transformations, provide a field - and container information. - - This field is a member of `oneof`_ ``location_type``. - container_type (google.cloud.dlp_v2.types.TransformationContainerType): - Information about the functionality of the - container where this finding occurred, if - available. - """ - - finding_id: str = proto.Field( - proto.STRING, - number=1, - oneof='location_type', - ) - record_transformation: 'RecordTransformation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location_type', - message='RecordTransformation', - ) - container_type: 'TransformationContainerType' = proto.Field( - proto.ENUM, - number=3, - enum='TransformationContainerType', - ) - - -class RecordTransformation(proto.Message): - r""" - - Attributes: - field_id (google.cloud.dlp_v2.types.FieldId): - For record transformations, provide a field. - container_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Findings container modification timestamp, if - applicable. - container_version (str): - Container version, if available ("generation" - for Cloud Storage). 
- """ - - field_id: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - container_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - container_version: str = proto.Field( - proto.STRING, - number=3, - ) - - -class TransformationResultStatus(proto.Message): - r""" - - Attributes: - result_status_type (google.cloud.dlp_v2.types.TransformationResultStatusType): - Transformation result status type, this will - be either SUCCESS, or it will be the reason for - why the transformation was not completely - successful. - details (google.rpc.status_pb2.Status): - Detailed error codes and messages - """ - - result_status_type: 'TransformationResultStatusType' = proto.Field( - proto.ENUM, - number=1, - enum='TransformationResultStatusType', - ) - details: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -class TransformationDetailsStorageConfig(proto.Message): - r"""Config for storing transformation details. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - The BigQuery table in which to store the output. This may be - an existing table or in a new table in an existing dataset. - If table_id is not set a new one will be generated for you - with the following format: - dlp_googleapis_transformation_details_yyyy_mm_dd_[dlp_job_id]. - Pacific time zone will be used for generating the date - details. - - This field is a member of `oneof`_ ``type``. - """ - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.BigQueryTable, - ) - - -class Schedule(proto.Message): - r"""Schedule for inspect job triggers. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - recurrence_period_duration (google.protobuf.duration_pb2.Duration): - With this option a job is started on a - regular periodic basis. For example: every day - (86400 seconds). - A scheduled start time will be skipped if the - previous execution has not ended when its - scheduled time occurs. - This value must be set to a time duration - greater than or equal to 1 day and can be no - longer than 60 days. - - This field is a member of `oneof`_ ``option``. - """ - - recurrence_period_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - oneof='option', - message=duration_pb2.Duration, - ) - - -class Manual(proto.Message): - r"""Job trigger option for hybrid jobs. Jobs must be manually - created and finished. - - """ - - -class InspectTemplate(proto.Message): - r"""The inspectTemplate contains a configuration (set of types of - sensitive data to be detected) to be used anywhere you otherwise - would normally specify InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates to learn - more. - - Attributes: - name (str): - Output only. The template name. - - The template will have one of the following formats: - ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of an - inspectTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of an - inspectTemplate. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - The core content of the template. - Configuration of the scanning process. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='InspectConfig', - ) - - -class DeidentifyTemplate(proto.Message): - r"""DeidentifyTemplates contains instructions on how to - de-identify content. See - https://cloud.google.com/dlp/docs/concepts-templates to learn - more. - - Attributes: - name (str): - Output only. The template name. - - The template will have one of the following formats: - ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of an - inspectTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of an - inspectTemplate. - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - The core content of the template. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - deidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='DeidentifyConfig', - ) - - -class Error(proto.Message): - r"""Details information about an error encountered during job - execution or the results of an unsuccessful activation of the - JobTrigger. - - Attributes: - details (google.rpc.status_pb2.Status): - Detailed error codes and messages. - timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): - The times the error occurred. - """ - - details: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class JobTrigger(proto.Message): - r"""Contains a configuration to make dlp api calls on a repeating - basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers to learn - more. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Unique resource name for the triggeredJob, assigned by the - service when the triggeredJob is created, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - display_name (str): - Display name (max 100 chars) - description (str): - User provided description (max 256 chars) - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - For inspect jobs, a snapshot of the - configuration. - - This field is a member of `oneof`_ ``job``. 
- triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): - A list of triggers which will be OR'ed - together. Only one in the list needs to trigger - for a job to be started. The list may contain - only a single Schedule trigger and must have at - least one object. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Output only. A stream of errors encountered - when the trigger was activated. Repeated errors - may result in the JobTrigger automatically being - paused. Will return the last 100 errors. - Whenever the JobTrigger is modified this list - will be cleared. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of a - triggeredJob. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of a - triggeredJob. - last_run_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp of the last time - this trigger executed. - status (google.cloud.dlp_v2.types.JobTrigger.Status): - Required. A status for this trigger. - """ - class Status(proto.Enum): - r"""Whether the trigger is currently active. If PAUSED or - CANCELLED, no jobs will be created with this configuration. The - service may automatically pause triggers experiencing frequent - errors. To restart a job, set the status to HEALTHY after - correcting user errors. - - Values: - STATUS_UNSPECIFIED (0): - Unused. - HEALTHY (1): - Trigger is healthy. - PAUSED (2): - Trigger is temporarily paused. - CANCELLED (3): - Trigger is cancelled and can not be resumed. - """ - STATUS_UNSPECIFIED = 0 - HEALTHY = 1 - PAUSED = 2 - CANCELLED = 3 - - class Trigger(proto.Message): - r"""What event needs to occur for a new job to be started. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - schedule (google.cloud.dlp_v2.types.Schedule): - Create a job on a repeating basis based on - the elapse of time. - - This field is a member of `oneof`_ ``trigger``. - manual (google.cloud.dlp_v2.types.Manual): - For use with hybrid jobs. Jobs must be - manually created and finished. - - This field is a member of `oneof`_ ``trigger``. - """ - - schedule: 'Schedule' = proto.Field( - proto.MESSAGE, - number=1, - oneof='trigger', - message='Schedule', - ) - manual: 'Manual' = proto.Field( - proto.MESSAGE, - number=2, - oneof='trigger', - message='Manual', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - inspect_job: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='job', - message='InspectJobConfig', - ) - triggers: MutableSequence[Trigger] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=Trigger, - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='Error', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - last_run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - status: Status = proto.Field( - proto.ENUM, - number=10, - enum=Status, - ) - - -class Action(proto.Message): - r"""A task to execute on the completion of a job. - See https://cloud.google.com/dlp/docs/concepts-actions to learn - more. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): - Save resulting findings in a provided - location. - - This field is a member of `oneof`_ ``action``. - pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub): - Publish a notification to a Pub/Sub topic. - - This field is a member of `oneof`_ ``action``. - publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc): - Publish summary to Cloud Security Command - Center (Alpha). - - This field is a member of `oneof`_ ``action``. - publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog): - Publish findings to Cloud Datahub. - - This field is a member of `oneof`_ ``action``. - deidentify (google.cloud.dlp_v2.types.Action.Deidentify): - Create a de-identified copy of the input - data. - - This field is a member of `oneof`_ ``action``. - job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails): - Sends an email when the job completes. The email goes to IAM - project owners and technical `Essential - Contacts `__. - - This field is a member of `oneof`_ ``action``. - publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver): - Enable Stackdriver metric dlp.googleapis.com/finding_count. - - This field is a member of `oneof`_ ``action``. - """ - - class SaveFindings(proto.Message): - r"""If set, the detailed findings will be persisted to the - specified OutputStorageConfig. Only a single instance of this - action can be specified. - Compatible with: Inspect, Risk - - Attributes: - output_config (google.cloud.dlp_v2.types.OutputStorageConfig): - Location to store findings outside of DLP. 
- """ - - output_config: 'OutputStorageConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='OutputStorageConfig', - ) - - class PublishToPubSub(proto.Message): - r"""Publish a message into a given Pub/Sub topic when DlpJob has - completed. The message contains a single field, ``DlpJobName``, - which is equal to the finished job's - ```DlpJob.name`` `__. - Compatible with: Inspect, Risk - - Attributes: - topic (str): - Cloud Pub/Sub topic to send notifications to. - The topic must have given publishing access - rights to the DLP API service account executing - the long running DlpJob sending the - notifications. Format is - projects/{project}/topics/{topic}. - """ - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - - class PublishSummaryToCscc(proto.Message): - r"""Publish the result summary of a DlpJob to the Cloud Security - Command Center (CSCC Alpha). - This action is only available for projects which are parts of an - organization and whitelisted for the alpha Cloud Security - Command Center. - The action will publish the count of finding instances and their - info types. The summary of findings will be persisted in CSCC - and are governed by CSCC service-specific policy, see - https://cloud.google.com/terms/service-terms Only a single - instance of this action can be specified. Compatible with: - Inspect - - """ - - class PublishFindingsToCloudDataCatalog(proto.Message): - r"""Publish findings of a DlpJob to Data Catalog. In Data Catalog, tag - templates are applied to the resource that Cloud DLP scanned. Data - Catalog tag templates are stored in the same project and region - where the BigQuery table exists. For Cloud DLP to create and apply - the tag template, the Cloud DLP service agent must have the - ``roles/datacatalog.tagTemplateOwner`` permission on the project. - The tag template contains fields summarizing the results of the - DlpJob. Any field values previously written by another DlpJob are - deleted. 
[InfoType naming patterns][google.privacy.dlp.v2.InfoType] - are strictly enforced when using this feature. - - Findings are persisted in Data Catalog storage and are governed by - service-specific policies for Data Catalog. For more information, - see `Service Specific - Terms `__. - - Only a single instance of this action can be specified. This action - is allowed only if all resources being scanned are BigQuery tables. - Compatible with: Inspect - - """ - - class Deidentify(proto.Message): - r"""Create a de-identified copy of the requested table or files. - - A TransformationDetail will be created for each transformation. - - If any rows in BigQuery are skipped during de-identification - (transformation errors or row size exceeds BigQuery insert API - limits) they are placed in the failure output table. If the original - row exceeds the BigQuery insert API limit it will be truncated when - written to the failure output table. The failure output table can be - set in the - action.deidentify.output.big_query_output.deidentified_failure_output_table - field, if no table is set, a table will be automatically created in - the same project and dataset as the original table. - - Compatible with: Inspect - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transformation_config (google.cloud.dlp_v2.types.TransformationConfig): - User specified deidentify templates and - configs for structured, unstructured, and image - files. - transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): - Config for storing transformation details. This is separate - from the de-identified content, and contains metadata about - the successful transformations and/or failures that occurred - while de-identifying. 
This needs to be set in order for - users to access information about the status of each - transformation (see - [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] - message for more information about what is noted). - cloud_storage_output (str): - Required. User settable Cloud Storage bucket - and folders to store de-identified files. This - field must be set for cloud storage - deidentification. The output Cloud Storage - bucket must be different from the input bucket. - De-identified files will overwrite files in the - output path. - Form of: gs://bucket/folder/ or gs://bucket - - This field is a member of `oneof`_ ``output``. - file_types_to_transform (MutableSequence[google.cloud.dlp_v2.types.FileType]): - List of user-specified file type groups to transform. If - specified, only the files with these filetypes will be - transformed. If empty, all supported files will be - transformed. Supported types may be automatically added over - time. If a file type is set in this field that isn't - supported by the Deidentify action then the job will fail - and will not be successfully created/started. Currently the - only filetypes supported are: IMAGES, TEXT_FILES, CSV, TSV. - """ - - transformation_config: 'TransformationConfig' = proto.Field( - proto.MESSAGE, - number=7, - message='TransformationConfig', - ) - transformation_details_storage_config: 'TransformationDetailsStorageConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='TransformationDetailsStorageConfig', - ) - cloud_storage_output: str = proto.Field( - proto.STRING, - number=9, - oneof='output', - ) - file_types_to_transform: MutableSequence[storage.FileType] = proto.RepeatedField( - proto.ENUM, - number=8, - enum=storage.FileType, - ) - - class JobNotificationEmails(proto.Message): - r"""Sends an email when the job completes. The email goes to IAM project - owners and technical `Essential - Contacts `__. 
- - """ - - class PublishToStackdriver(proto.Message): - r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This - will publish a metric to stack driver on each infotype requested and - how many findings were found for it. CustomDetectors will be - bucketed as 'Custom' under the Stackdriver label 'info_type'. - - """ - - save_findings: SaveFindings = proto.Field( - proto.MESSAGE, - number=1, - oneof='action', - message=SaveFindings, - ) - pub_sub: PublishToPubSub = proto.Field( - proto.MESSAGE, - number=2, - oneof='action', - message=PublishToPubSub, - ) - publish_summary_to_cscc: PublishSummaryToCscc = proto.Field( - proto.MESSAGE, - number=3, - oneof='action', - message=PublishSummaryToCscc, - ) - publish_findings_to_cloud_data_catalog: PublishFindingsToCloudDataCatalog = proto.Field( - proto.MESSAGE, - number=5, - oneof='action', - message=PublishFindingsToCloudDataCatalog, - ) - deidentify: Deidentify = proto.Field( - proto.MESSAGE, - number=7, - oneof='action', - message=Deidentify, - ) - job_notification_emails: JobNotificationEmails = proto.Field( - proto.MESSAGE, - number=8, - oneof='action', - message=JobNotificationEmails, - ) - publish_to_stackdriver: PublishToStackdriver = proto.Field( - proto.MESSAGE, - number=9, - oneof='action', - message=PublishToStackdriver, - ) - - -class TransformationConfig(proto.Message): - r"""User specified templates and configs for how to deidentify - structured, unstructures, and image files. User must provide - either a unstructured deidentify template or at least one redact - image config. - - Attributes: - deidentify_template (str): - De-identify template. If this template is specified, it will - serve as the default de-identify template. This template - cannot contain ``record_transformations`` since it can be - used for unstructured content such as free-form text files. - If this template is not set, a default - ``ReplaceWithInfoTypeConfig`` will be used to de-identify - unstructured content. 
- structured_deidentify_template (str): - Structured de-identify template. If this template is - specified, it will serve as the de-identify template for - structured content such as delimited files and tables. If - this template is not set but the ``deidentify_template`` is - set, then ``deidentify_template`` will also apply to the - structured content. If neither template is set, a default - ``ReplaceWithInfoTypeConfig`` will be used to de-identify - structured content. - image_redact_template (str): - Image redact template. - If this template is specified, it will serve as - the de-identify template for images. If this - template is not set, all findings in the image - will be redacted with a black box. - """ - - deidentify_template: str = proto.Field( - proto.STRING, - number=1, - ) - structured_deidentify_template: str = proto.Field( - proto.STRING, - number=2, - ) - image_redact_template: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CreateInspectTemplateRequest(proto.Message): - r"""Request message for CreateInspectTemplate. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - Required. The InspectTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - template_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateInspectTemplateRequest(proto.Message): - r"""Request message for UpdateInspectTemplate. - - Attributes: - name (str): - Required. Resource name of organization and inspectTemplate - to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - New InspectTemplate value. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetInspectTemplateRequest(proto.Message): - r"""Request message for GetInspectTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListInspectTemplatesRequest(proto.Message): - r"""Request message for ListInspectTemplates. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListInspectTemplates``. - page_size (int): - Size of the page, can be limited by the - server. 
If zero server returns a page of max - size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the template was - created. - - ``update_time``: corresponds to the time the template was - last updated. - - ``name``: corresponds to the template's name. - - ``display_name``: corresponds to the template's display - name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListInspectTemplatesResponse(proto.Message): - r"""Response message for ListInspectTemplates. - - Attributes: - inspect_templates (MutableSequence[google.cloud.dlp_v2.types.InspectTemplate]): - List of inspectTemplates, up to page_size in - ListInspectTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListInspectTemplates request. - """ - - @property - def raw_page(self): - return self - - inspect_templates: MutableSequence['InspectTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteInspectTemplateRequest(proto.Message): - r"""Request message for DeleteInspectTemplate. - - Attributes: - name (str): - Required. 
Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateJobTriggerRequest(proto.Message): - r"""Request message for CreateJobTrigger. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. - trigger_id (str): - The trigger id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - job_trigger: 'JobTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - trigger_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ActivateJobTriggerRequest(proto.Message): - r"""Request message for ActivateJobTrigger. - - Attributes: - name (str): - Required. Resource name of the trigger to activate, for - example ``projects/dlp-test-project/jobTriggers/53234423``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateJobTriggerRequest(proto.Message): - r"""Request message for UpdateJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - job_trigger: 'JobTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetJobTriggerRequest(proto.Message): - r"""Request message for GetJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDlpJobRequest(proto.Message): - r"""Request message for CreateDlpJobRequest. Used to initiate - long running jobs such as calculating risk metrics or inspecting - Google Cloud Storage. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - An inspection job scans a storage repository - for InfoTypes. - - This field is a member of `oneof`_ ``job``. - risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - A risk analysis job calculates - re-identification risk metrics for a BigQuery - table. - - This field is a member of `oneof`_ ``job``. - job_id (str): - The job id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_job: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='job', - message='InspectJobConfig', - ) - risk_job: 'RiskAnalysisJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='job', - message='RiskAnalysisJobConfig', - ) - job_id: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListJobTriggersRequest(proto.Message): - r"""Request message for ListJobTriggers. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ListJobTriggers. ``order_by`` field must not change for - subsequent calls. - page_size (int): - Size of the page, can be limited by a server. - order_by (str): - Comma separated list of triggeredJob fields to order by, - followed by ``asc`` or ``desc`` postfix. This list is - case-insensitive, default sorting order is ascending, - redundant space characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the JobTrigger - was created. - - ``update_time``: corresponds to the time the JobTrigger - was last updated. - - ``last_run_time``: corresponds to the last time the - JobTrigger ran. - - ``name``: corresponds to the JobTrigger's name. - - ``display_name``: corresponds to the JobTrigger's display - name. - - ``status``: corresponds to JobTrigger's status. - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. 
- - Supported fields/values for inspect triggers: - - - ``status`` - HEALTHY|PAUSED|CANCELLED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - 'last_run_time\` - RFC 3339 formatted timestamp, - surrounded by quotation marks. Nanoseconds are - ignored. - - 'error_count' - Number of errors that have occurred - while running. - - - The operator must be ``=`` or ``!=`` for status and - inspected_storage. - - Examples: - - - inspected_storage = cloud_storage AND status = HEALTHY - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = PAUSED OR - state = HEALTHY) - - last_run_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of jobs. Will use ``DlpJobType.INSPECT`` if not - set. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=6, - enum='DlpJobType', - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListJobTriggersResponse(proto.Message): - r"""Response message for ListJobTriggers. - - Attributes: - job_triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger]): - List of triggeredJobs, up to page_size in - ListJobTriggersRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListJobTriggers request. 
- """ - - @property - def raw_page(self): - return self - - job_triggers: MutableSequence['JobTrigger'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='JobTrigger', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteJobTriggerRequest(proto.Message): - r"""Request message for DeleteJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class InspectJobConfig(proto.Message): - r"""Controls what and how to inspect for findings. - - Attributes: - storage_config (google.cloud.dlp_v2.types.StorageConfig): - The data to scan. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - How and what to scan for. - inspect_template_name (str): - If provided, will be used as the default for all values in - InspectConfig. ``inspect_config`` will be merged into the - values persisted as part of the template. - actions (MutableSequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. - """ - - storage_config: storage.StorageConfig = proto.Field( - proto.MESSAGE, - number=1, - message=storage.StorageConfig, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=3, - ) - actions: MutableSequence['Action'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Action', - ) - - -class DataProfileAction(proto.Message): - r"""A task to execute when a data profile has been generated. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - export_data (google.cloud.dlp_v2.types.DataProfileAction.Export): - Export data profiles into a provided - location. - - This field is a member of `oneof`_ ``action``. - pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): - Publish a message into the Pub/Sub topic. - - This field is a member of `oneof`_ ``action``. - """ - class EventType(proto.Enum): - r"""Types of event that can trigger an action. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - Unused. - NEW_PROFILE (1): - New profile (not a re-profile). - CHANGED_PROFILE (2): - Changed one of the following profile metrics: - - - Table data risk score - - Table sensitivity score - - Table resource visibility - - Table encryption type - - Table predicted infoTypes - - Table other infoTypes - SCORE_INCREASED (3): - Table data risk score or sensitivity score - increased. - ERROR_CHANGED (4): - A user (non-internal) error occurred. - """ - EVENT_TYPE_UNSPECIFIED = 0 - NEW_PROFILE = 1 - CHANGED_PROFILE = 2 - SCORE_INCREASED = 3 - ERROR_CHANGED = 4 - - class Export(proto.Message): - r"""If set, the detailed data profiles will be persisted to the - location of your choice whenever updated. - - Attributes: - profile_table (google.cloud.dlp_v2.types.BigQueryTable): - Store all table and column profiles in an - existing table or a new table in an existing - dataset. Each re-generation will result in a new - row in BigQuery. - """ - - profile_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - message=storage.BigQueryTable, - ) - - class PubSubNotification(proto.Message): - r"""Send a Pub/Sub message into the given Pub/Sub topic to connect other - systems to data profile generation. The message payload data will be - the byte serialization of ``DataProfilePubSubMessage``. - - Attributes: - topic (str): - Cloud Pub/Sub topic to send notifications to. 
- Format is projects/{project}/topics/{topic}. - event (google.cloud.dlp_v2.types.DataProfileAction.EventType): - The type of event that triggers a Pub/Sub. At most one - ``PubSubNotification`` per EventType is permitted. - pubsub_condition (google.cloud.dlp_v2.types.DataProfilePubSubCondition): - Conditions (e.g., data risk or sensitivity - level) for triggering a Pub/Sub. - detail_of_message (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification.DetailLevel): - How much data to include in the Pub/Sub message. If the user - wishes to limit the size of the message, they can use - resource_name and fetch the profile fields they wish to. Per - table profile (not per column). - """ - class DetailLevel(proto.Enum): - r"""The levels of detail that can be included in the Pub/Sub - message. - - Values: - DETAIL_LEVEL_UNSPECIFIED (0): - Unused. - TABLE_PROFILE (1): - The full table data profile. - RESOURCE_NAME (2): - The resource name of the table. - """ - DETAIL_LEVEL_UNSPECIFIED = 0 - TABLE_PROFILE = 1 - RESOURCE_NAME = 2 - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - event: 'DataProfileAction.EventType' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileAction.EventType', - ) - pubsub_condition: 'DataProfilePubSubCondition' = proto.Field( - proto.MESSAGE, - number=3, - message='DataProfilePubSubCondition', - ) - detail_of_message: 'DataProfileAction.PubSubNotification.DetailLevel' = proto.Field( - proto.ENUM, - number=4, - enum='DataProfileAction.PubSubNotification.DetailLevel', - ) - - export_data: Export = proto.Field( - proto.MESSAGE, - number=1, - oneof='action', - message=Export, - ) - pub_sub_notification: PubSubNotification = proto.Field( - proto.MESSAGE, - number=2, - oneof='action', - message=PubSubNotification, - ) - - -class DataProfileJobConfig(proto.Message): - r"""Configuration for setting up a job to scan resources for profile - generation. 
Only one data profile configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to the [data - retention policy] - (https://cloud.google.com/dlp/docs/data-profiles#retention). - - Attributes: - location (google.cloud.dlp_v2.types.DataProfileLocation): - The data to scan. - project_id (str): - The project that will run the scan. The DLP - service account that exists within this project - must have access to all resources that are - profiled, and the Cloud DLP API must be enabled. - inspect_templates (MutableSequence[str]): - Detection logic for profile generation. - - Not all template features are used by profiles. - FindingLimits, include_quote and exclude_info_types have no - impact on data profiling. - - Multiple templates may be provided if there is data in - multiple regions. At most one template must be specified - per-region (including "global"). Each region is scanned - using the applicable template. If no region-specific - template is specified, but a "global" template is specified, - it will be copied to that region and used instead. If no - global or region-specific template is provided for a region - with data, that region's data will not be scanned. - - For more information, see - https://cloud.google.com/dlp/docs/data-profiles#data_residency. - data_profile_actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): - Actions to execute at the completion of the - job. - """ - - location: 'DataProfileLocation' = proto.Field( - proto.MESSAGE, - number=1, - message='DataProfileLocation', - ) - project_id: str = proto.Field( - proto.STRING, - number=5, - ) - inspect_templates: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - data_profile_actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='DataProfileAction', - ) - - -class DataProfileLocation(proto.Message): - r"""The data that will be profiled. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - organization_id (int): - The ID of an organization to scan. - - This field is a member of `oneof`_ ``location``. - folder_id (int): - The ID of the Folder within an organization - to scan. - - This field is a member of `oneof`_ ``location``. - """ - - organization_id: int = proto.Field( - proto.INT64, - number=1, - oneof='location', - ) - folder_id: int = proto.Field( - proto.INT64, - number=2, - oneof='location', - ) - - -class DlpJob(proto.Message): - r"""Combines all of the information about a DLP job. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The server-assigned name. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. - state (google.cloud.dlp_v2.types.DlpJob.JobState): - State of a job. - risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): - Results from analyzing risk of a data source. - - This field is a member of `oneof`_ ``details``. - inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): - Results from inspecting a data source. - - This field is a member of `oneof`_ ``details``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job finished. 
- job_trigger_name (str): - If created by a job trigger, the resource - name of the trigger that instantiated the job. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - A stream of errors encountered running the - job. - """ - class JobState(proto.Enum): - r"""Possible states of a job. New items may be added. - - Values: - JOB_STATE_UNSPECIFIED (0): - Unused. - PENDING (1): - The job has not yet started. - RUNNING (2): - The job is currently running. Once a job has - finished it will transition to FAILED or DONE. - DONE (3): - The job is no longer running. - CANCELED (4): - The job was canceled before it could be - completed. - FAILED (5): - The job had an error and did not complete. - ACTIVE (6): - The job is currently accepting findings via - hybridInspect. A hybrid job in ACTIVE state may - continue to have findings added to it through - the calling of hybridInspect. After the job has - finished no more calls to hybridInspect may be - made. ACTIVE jobs can transition to DONE. 
- """ - JOB_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - CANCELED = 4 - FAILED = 5 - ACTIVE = 6 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=2, - enum='DlpJobType', - ) - state: JobState = proto.Field( - proto.ENUM, - number=3, - enum=JobState, - ) - risk_details: 'AnalyzeDataSourceRiskDetails' = proto.Field( - proto.MESSAGE, - number=4, - oneof='details', - message='AnalyzeDataSourceRiskDetails', - ) - inspect_details: 'InspectDataSourceDetails' = proto.Field( - proto.MESSAGE, - number=5, - oneof='details', - message='InspectDataSourceDetails', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - job_trigger_name: str = proto.Field( - proto.STRING, - number=10, - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='Error', - ) - - -class GetDlpJobRequest(proto.Message): - r"""The request message for [DlpJobs.GetDlpJob][]. - - Attributes: - name (str): - Required. The name of the DlpJob resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDlpJobsRequest(proto.Message): - r"""The request message for listing DLP jobs. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - - - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - ``trigger_name`` - The name of the trigger that - created the job. - - 'end_time\` - Corresponds to the time the job - finished. - - 'start_time\` - Corresponds to the time the job - finished. - - - Supported fields for risk analysis jobs: - - - ``state`` - RUNNING|CANCELED|FINISHED|FAILED - - 'end_time\` - Corresponds to the time the job - finished. - - 'start_time\` - Corresponds to the time the job - finished. - - - The operator must be ``=`` or ``!=``. - - Examples: - - - inspected_storage = cloud_storage AND state = done - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = done OR - state = canceled) - - end_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. - page_size (int): - The standard list page size. - page_token (str): - The standard list page token. 
- type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. Defaults to ``DlpJobType.INSPECT`` - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc, end_time asc, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the job was - created. - - ``end_time``: corresponds to the time the job ended. - - ``name``: corresponds to the job's name. - - ``state``: corresponds to ``state`` - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=5, - enum='DlpJobType', - ) - order_by: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListDlpJobsResponse(proto.Message): - r"""The response message for listing DLP jobs. - - Attributes: - jobs (MutableSequence[google.cloud.dlp_v2.types.DlpJob]): - A list of DlpJobs that matches the specified - filter in the request. - next_page_token (str): - The standard List next-page token. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence['DlpJob'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DlpJob', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelDlpJobRequest(proto.Message): - r"""The request message for canceling a DLP job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be cancelled. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FinishDlpJobRequest(proto.Message): - r"""The request message for finishing a DLP hybrid job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be cancelled. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteDlpJobRequest(proto.Message): - r"""The request message for deleting a DLP job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDeidentifyTemplateRequest(proto.Message): - r"""Request message for CreateDeidentifyTemplate. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Required. The DeidentifyTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. 
This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - template_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateDeidentifyTemplateRequest(proto.Message): - r"""Request message for UpdateDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetDeidentifyTemplateRequest(proto.Message): - r"""Request message for GetDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDeidentifyTemplatesRequest(proto.Message): - r"""Request message for ListDeidentifyTemplates. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListDeidentifyTemplates``. - page_size (int): - Size of the page, can be limited by the - server. If zero server returns a page of max - size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the template was - created. - - ``update_time``: corresponds to the time the template was - last updated. - - ``name``: corresponds to the template's name. - - ``display_name``: corresponds to the template's display - name. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDeidentifyTemplatesResponse(proto.Message): - r"""Response message for ListDeidentifyTemplates. - - Attributes: - deidentify_templates (MutableSequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): - List of deidentify templates, up to page_size in - ListDeidentifyTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListDeidentifyTemplates request. - """ - - @property - def raw_page(self): - return self - - deidentify_templates: MutableSequence['DeidentifyTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DeidentifyTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteDeidentifyTemplateRequest(proto.Message): - r"""Request message for DeleteDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class LargeCustomDictionaryConfig(proto.Message): - r"""Configuration for a custom dictionary created from a data source of - any size up to the maximum size defined in the - `limits `__ page. The artifacts - of dictionary creation are stored in the specified Cloud Storage - location. Consider using ``CustomInfoType.Dictionary`` for smaller - dictionaries that satisfy the size requirements. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - output_path (google.cloud.dlp_v2.types.CloudStoragePath): - Location to store dictionary artifacts in - Cloud Storage. These files will only be - accessible by project owners and the DLP API. If - any of these artifacts are modified, the - dictionary is considered invalid and can no - longer be used. - cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): - Set of files containing newline-delimited - lists of dictionary phrases. - - This field is a member of `oneof`_ ``source``. - big_query_field (google.cloud.dlp_v2.types.BigQueryField): - Field in a BigQuery table where each cell - represents a dictionary phrase. - - This field is a member of `oneof`_ ``source``. - """ - - output_path: storage.CloudStoragePath = proto.Field( - proto.MESSAGE, - number=1, - message=storage.CloudStoragePath, - ) - cloud_storage_file_set: storage.CloudStorageFileSet = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message=storage.CloudStorageFileSet, - ) - big_query_field: storage.BigQueryField = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message=storage.BigQueryField, - ) - - -class LargeCustomDictionaryStats(proto.Message): - r"""Summary statistics of a custom dictionary. - - Attributes: - approx_num_phrases (int): - Approximate number of distinct phrases in the - dictionary. - """ - - approx_num_phrases: int = proto.Field( - proto.INT64, - number=1, - ) - - -class StoredInfoTypeConfig(proto.Message): - r"""Configuration for stored infoTypes. All fields and subfield - are provided by the user. For more information, see - https://cloud.google.com/dlp/docs/creating-custom-infotypes. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - display_name (str): - Display name of the StoredInfoType (max 256 - characters). - description (str): - Description of the StoredInfoType (max 256 - characters). - large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): - StoredInfoType where findings are defined by - a dictionary of phrases. - - This field is a member of `oneof`_ ``type``. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - Store dictionary-based CustomInfoType. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Store regular expression-based - StoredInfoType. - - This field is a member of `oneof`_ ``type``. - """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - large_custom_dictionary: 'LargeCustomDictionaryConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='LargeCustomDictionaryConfig', - ) - dictionary: storage.CustomInfoType.Dictionary = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - - -class StoredInfoTypeStats(proto.Message): - r"""Statistics for a StoredInfoType. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): - StoredInfoType where findings are defined by - a dictionary of phrases. - - This field is a member of `oneof`_ ``type``. 
- """ - - large_custom_dictionary: 'LargeCustomDictionaryStats' = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='LargeCustomDictionaryStats', - ) - - -class StoredInfoTypeVersion(proto.Message): - r"""Version of a StoredInfoType, including the configuration used - to build it, create timestamp, and current state. - - Attributes: - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - StoredInfoType configuration. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Create timestamp of the version. Read-only, - determined by the system when the version is - created. - state (google.cloud.dlp_v2.types.StoredInfoTypeState): - Stored info type version state. Read-only, - updated by the system during dictionary - creation. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Errors that occurred when creating this storedInfoType - version, or anomalies detected in the storedInfoType data - that render it unusable. Only the five most recent errors - will be displayed, with the most recent error appearing - first. - - For example, some of the data for stored custom dictionaries - is put in the user's Cloud Storage bucket, and if this data - is modified or deleted by the user or another system, the - dictionary becomes invalid. - - If any errors occur, fix the problem indicated by the error - message and use the UpdateStoredInfoType API method to - create another version of the storedInfoType to continue - using it, reusing the same ``config`` if it was not the - source of the error. - stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): - Statistics about this storedInfoType version. 
- """ - - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='StoredInfoTypeConfig', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - state: 'StoredInfoTypeState' = proto.Field( - proto.ENUM, - number=3, - enum='StoredInfoTypeState', - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Error', - ) - stats: 'StoredInfoTypeStats' = proto.Field( - proto.MESSAGE, - number=5, - message='StoredInfoTypeStats', - ) - - -class StoredInfoType(proto.Message): - r"""StoredInfoType resource message that contains information - about the current version and any pending updates. - - Attributes: - name (str): - Resource name. - current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): - Current version of the stored info type. - pending_versions (MutableSequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): - Pending versions of the stored info type. - Empty if no versions are pending. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - current_version: 'StoredInfoTypeVersion' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeVersion', - ) - pending_versions: MutableSequence['StoredInfoTypeVersion'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StoredInfoTypeVersion', - ) - - -class CreateStoredInfoTypeRequest(proto.Message): - r"""Request message for CreateStoredInfoType. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the storedInfoType - to create. - stored_info_type_id (str): - The storedInfoType ID can contain uppercase and lowercase - letters, numbers, and hyphens; that is, it must match the - regular expression: ``[a-zA-Z\d-_]+``. The maximum length is - 100 characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - stored_info_type_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateStoredInfoTypeRequest(proto.Message): - r"""Request message for UpdateStoredInfoType. - - Attributes: - name (str): - Required. Resource name of organization and storedInfoType - to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. 
- config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the storedInfoType. - If not provided, a new version of the - storedInfoType will be created with the existing - configuration. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetStoredInfoTypeRequest(proto.Message): - r"""Request message for GetStoredInfoType. - - Attributes: - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListStoredInfoTypesRequest(proto.Message): - r"""Request message for ListStoredInfoTypes. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListStoredInfoTypes``. - page_size (int): - Size of the page, can be limited by the - server. 
If zero server returns a page of max - size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc, display_name, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the most recent - version of the resource was created. - - ``state``: corresponds to the state of the resource. - - ``name``: corresponds to resource name. - - ``display_name``: corresponds to info type's display - name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListStoredInfoTypesResponse(proto.Message): - r"""Response message for ListStoredInfoTypes. - - Attributes: - stored_info_types (MutableSequence[google.cloud.dlp_v2.types.StoredInfoType]): - List of storedInfoTypes, up to page_size in - ListStoredInfoTypesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListStoredInfoTypes request. - """ - - @property - def raw_page(self): - return self - - stored_info_types: MutableSequence['StoredInfoType'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='StoredInfoType', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteStoredInfoTypeRequest(proto.Message): - r"""Request message for DeleteStoredInfoType. - - Attributes: - name (str): - Required. 
Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class HybridInspectJobTriggerRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the trigger to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item: 'HybridContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridInspectDlpJobRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item: 'HybridContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridContentItem(proto.Message): - r"""An individual hybrid item to inspect. Will be stored - temporarily during processing. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The item to inspect. - finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): - Supplementary information that will be added - to each finding. 
- """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - finding_details: 'HybridFindingDetails' = proto.Field( - proto.MESSAGE, - number=2, - message='HybridFindingDetails', - ) - - -class HybridFindingDetails(proto.Message): - r"""Populate to associate additional data with each finding. - - Attributes: - container_details (google.cloud.dlp_v2.types.Container): - Details about the container where the content - being inspected is from. - file_offset (int): - Offset in bytes of the line, from the - beginning of the file, where the finding is - located. Populate if the item being scanned is - only part of a bigger item, such as a shard of a - file and you want to track the absolute position - of the finding. - row_offset (int): - Offset of the row for tables. Populate if the - row(s) being scanned are part of a bigger - dataset and you want to keep track of their - absolute position. - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional information to make - findings meaningful such as the columns that are primary - keys. If not known ahead of time, can also be set within - each inspect hybrid call and the two will be merged. Note - that identifying_fields will only be stored to BigQuery, and - only if the BigQuery action has been included. - labels (MutableMapping[str, str]): - Labels to represent user provided metadata about the data - being inspected. If configured by the job, some key values - may be required. The labels associated with ``Finding``'s - produced by hybrid inspection. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. 
- - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - """ - - container_details: 'Container' = proto.Field( - proto.MESSAGE, - number=1, - message='Container', - ) - file_offset: int = proto.Field( - proto.INT64, - number=2, - ) - row_offset: int = proto.Field( - proto.INT64, - number=3, - ) - table_options: storage.TableOptions = proto.Field( - proto.MESSAGE, - number=4, - message=storage.TableOptions, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -class HybridInspectResponse(proto.Message): - r"""Quota exceeded errors will be thrown once quota has been met. - """ - - -class DataRiskLevel(proto.Message): - r"""Score is a summary of all elements in the data profile. - A higher number means more risk. - - Attributes: - score (google.cloud.dlp_v2.types.DataRiskLevel.DataRiskLevelScore): - The score applied to the resource. - """ - class DataRiskLevelScore(proto.Enum): - r"""Various score levels for resources. - - Values: - RISK_SCORE_UNSPECIFIED (0): - Unused. - RISK_LOW (10): - Low risk - Lower indication of sensitive data - that appears to have additional access - restrictions in place or no indication of - sensitive data found. - RISK_MODERATE (20): - Medium risk - Sensitive data may be present - but additional access or fine grain access - restrictions appear to be present. Consider - limiting access even further or transform data - to mask. - RISK_HIGH (30): - High risk – SPII may be present. Access - controls may include public ACLs. Exfiltration - of data may lead to user data loss. - Re-identification of users may be possible. - Consider limiting usage and or removing SPII. 
- """ - RISK_SCORE_UNSPECIFIED = 0 - RISK_LOW = 10 - RISK_MODERATE = 20 - RISK_HIGH = 30 - - score: DataRiskLevelScore = proto.Field( - proto.ENUM, - number=1, - enum=DataRiskLevelScore, - ) - - -class DataProfileConfigSnapshot(proto.Message): - r"""Snapshot of the configurations used to generate the profile. - - Attributes: - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - A copy of the inspection config used to generate this - profile. This is a copy of the inspect_template specified in - ``DataProfileJobConfig``. - data_profile_job (google.cloud.dlp_v2.types.DataProfileJobConfig): - A copy of the configuration used to generate - this profile. - """ - - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - data_profile_job: 'DataProfileJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='DataProfileJobConfig', - ) - - -class TableDataProfile(proto.Message): - r"""The profile for a scanned table. - - Attributes: - name (str): - The name of the profile. - project_data_profile (str): - The resource name to the project data profile - for this table. - dataset_project_id (str): - The GCP project ID that owns the BigQuery - dataset. - dataset_location (str): - The BigQuery location where the dataset's - data is stored. See - https://cloud.google.com/bigquery/docs/locations - for supported locations. - dataset_id (str): - The BigQuery dataset ID. - table_id (str): - The BigQuery table ID. - full_resource (str): - The resource name of the table. - https://cloud.google.com/apis/design/resource_names#full_resource_name - profile_status (google.cloud.dlp_v2.types.ProfileStatus): - Success or error status from the most recent - profile generation attempt. May be empty if the - profile is still being generated. - state (google.cloud.dlp_v2.types.TableDataProfile.State): - State of a profile. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - The sensitivity score of this table. 
- data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): - The data risk level of this table. - predicted_info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSummary]): - The infoTypes predicted from this table's - data. - other_info_types (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): - Other infoTypes found in this table's data. - config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): - The snapshot of the configurations used to - generate the profile. - last_modified_time (google.protobuf.timestamp_pb2.Timestamp): - The time when this table was last modified - expiration_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when this table expires. - scanned_column_count (int): - The number of columns profiled in the table. - failed_column_count (int): - The number of columns skipped in the table - because of an error. - table_size_bytes (int): - The size of the table when the profile was - generated. - row_count (int): - Number of rows in the table when the profile - was generated. This will not be populated for - BigLake tables. - encryption_status (google.cloud.dlp_v2.types.EncryptionStatus): - How the table is encrypted. - resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): - How broadly a resource has been shared. - profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): - The last time the profile was generated. - resource_labels (MutableMapping[str, str]): - The labels applied to the resource at the - time the profile was generated. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the table was created. - """ - class State(proto.Enum): - r"""Possible states of a profile. New items may be added. - - Values: - STATE_UNSPECIFIED (0): - Unused. - RUNNING (1): - The profile is currently running. Once a - profile has finished it will transition to DONE. - DONE (2): - The profile is no longer generating. 
If - profile_status.status.code is 0, the profile succeeded, - otherwise, it failed. - """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - DONE = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - project_data_profile: str = proto.Field( - proto.STRING, - number=2, - ) - dataset_project_id: str = proto.Field( - proto.STRING, - number=24, - ) - dataset_location: str = proto.Field( - proto.STRING, - number=29, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=25, - ) - table_id: str = proto.Field( - proto.STRING, - number=26, - ) - full_resource: str = proto.Field( - proto.STRING, - number=3, - ) - profile_status: 'ProfileStatus' = proto.Field( - proto.MESSAGE, - number=21, - message='ProfileStatus', - ) - state: State = proto.Field( - proto.ENUM, - number=22, - enum=State, - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=5, - message=storage.SensitivityScore, - ) - data_risk_level: 'DataRiskLevel' = proto.Field( - proto.MESSAGE, - number=6, - message='DataRiskLevel', - ) - predicted_info_types: MutableSequence['InfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message='InfoTypeSummary', - ) - other_info_types: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=28, - message='OtherInfoTypeSummary', - ) - config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( - proto.MESSAGE, - number=7, - message='DataProfileConfigSnapshot', - ) - last_modified_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - expiration_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - scanned_column_count: int = proto.Field( - proto.INT64, - number=10, - ) - failed_column_count: int = proto.Field( - proto.INT64, - number=11, - ) - table_size_bytes: int = proto.Field( - proto.INT64, - number=12, - ) - row_count: int = proto.Field( - proto.INT64, - 
number=13, - ) - encryption_status: 'EncryptionStatus' = proto.Field( - proto.ENUM, - number=14, - enum='EncryptionStatus', - ) - resource_visibility: 'ResourceVisibility' = proto.Field( - proto.ENUM, - number=15, - enum='ResourceVisibility', - ) - profile_last_generated: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=16, - message=timestamp_pb2.Timestamp, - ) - resource_labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=17, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=23, - message=timestamp_pb2.Timestamp, - ) - - -class ProfileStatus(proto.Message): - r""" - - Attributes: - status (google.rpc.status_pb2.Status): - Profiling status code and optional message - timestamp (google.protobuf.timestamp_pb2.Timestamp): - Time when the profile generation status was - updated - """ - - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class InfoTypeSummary(proto.Message): - r"""The infoType details for this column. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The infoType. - estimated_prevalence (int): - Not populated for predicted infotypes. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - estimated_prevalence: int = proto.Field( - proto.INT32, - number=2, - ) - - -class OtherInfoTypeSummary(proto.Message): - r"""Infotype details for other infoTypes found within a column. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The other infoType. - estimated_prevalence (int): - Approximate percentage of non-null rows that - contained data detected by this infotype. 
- """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - estimated_prevalence: int = proto.Field( - proto.INT32, - number=2, - ) - - -class DataProfilePubSubCondition(proto.Message): - r"""A condition for determining whether a Pub/Sub should be - triggered. - - Attributes: - expressions (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions): - An expression. - """ - class ProfileScoreBucket(proto.Enum): - r"""Various score levels for resources. - - Values: - PROFILE_SCORE_BUCKET_UNSPECIFIED (0): - Unused. - HIGH (1): - High risk/sensitivity detected. - MEDIUM_OR_HIGH (2): - Medium or high risk/sensitivity detected. - """ - PROFILE_SCORE_BUCKET_UNSPECIFIED = 0 - HIGH = 1 - MEDIUM_OR_HIGH = 2 - - class PubSubCondition(proto.Message): - r"""A condition consisting of a value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - minimum_risk_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): - The minimum data risk score that triggers the - condition. - - This field is a member of `oneof`_ ``value``. - minimum_sensitivity_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): - The minimum sensitivity level that triggers - the condition. - - This field is a member of `oneof`_ ``value``. 
- """ - - minimum_risk_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( - proto.ENUM, - number=1, - oneof='value', - enum='DataProfilePubSubCondition.ProfileScoreBucket', - ) - minimum_sensitivity_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( - proto.ENUM, - number=2, - oneof='value', - enum='DataProfilePubSubCondition.ProfileScoreBucket', - ) - - class PubSubExpressions(proto.Message): - r"""An expression, consisting of an operator and conditions. - - Attributes: - logical_operator (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator): - The operator to apply to the collection of - conditions. - conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubCondition]): - Conditions to apply to the expression. - """ - class PubSubLogicalOperator(proto.Enum): - r"""Logical operators for conditional checks. - - Values: - LOGICAL_OPERATOR_UNSPECIFIED (0): - Unused. - OR (1): - Conditional OR. - AND (2): - Conditional AND. - """ - LOGICAL_OPERATOR_UNSPECIFIED = 0 - OR = 1 - AND = 2 - - logical_operator: 'DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator', - ) - conditions: MutableSequence['DataProfilePubSubCondition.PubSubCondition'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='DataProfilePubSubCondition.PubSubCondition', - ) - - expressions: PubSubExpressions = proto.Field( - proto.MESSAGE, - number=1, - message=PubSubExpressions, - ) - - -class DataProfilePubSubMessage(proto.Message): - r"""Pub/Sub topic message for a - DataProfileAction.PubSubNotification event. To receive a message - of protocol buffer schema type, convert the message data to an - object of this proto class. 
- - Attributes: - profile (google.cloud.dlp_v2.types.TableDataProfile): - If ``DetailLevel`` is ``TABLE_PROFILE`` this will be fully - populated. Otherwise, if ``DetailLevel`` is - ``RESOURCE_NAME``, then only ``name`` and ``full_resource`` - will be populated. - event (google.cloud.dlp_v2.types.DataProfileAction.EventType): - The event that caused the Pub/Sub message to - be sent. - """ - - profile: 'TableDataProfile' = proto.Field( - proto.MESSAGE, - number=1, - message='TableDataProfile', - ) - event: 'DataProfileAction.EventType' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileAction.EventType', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py deleted file mode 100644 index fdb81846..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py +++ /dev/null @@ -1,1474 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'Likelihood', - 'FileType', - 'InfoType', - 'SensitivityScore', - 'StoredType', - 'CustomInfoType', - 'FieldId', - 'PartitionId', - 'KindExpression', - 'DatastoreOptions', - 'CloudStorageRegexFileSet', - 'CloudStorageOptions', - 'CloudStorageFileSet', - 'CloudStoragePath', - 'BigQueryOptions', - 'StorageConfig', - 'HybridOptions', - 'BigQueryKey', - 'DatastoreKey', - 'Key', - 'RecordKey', - 'BigQueryTable', - 'BigQueryField', - 'EntityId', - 'TableOptions', - }, -) - - -class Likelihood(proto.Enum): - r"""Categorization of results based on how likely they are to - represent a match, based on the number of elements they contain - which imply a match. - - Values: - LIKELIHOOD_UNSPECIFIED (0): - Default value; same as POSSIBLE. - VERY_UNLIKELY (1): - Few matching elements. - UNLIKELY (2): - No description available. - POSSIBLE (3): - Some matching elements. - LIKELY (4): - No description available. - VERY_LIKELY (5): - Many matching elements. - """ - LIKELIHOOD_UNSPECIFIED = 0 - VERY_UNLIKELY = 1 - UNLIKELY = 2 - POSSIBLE = 3 - LIKELY = 4 - VERY_LIKELY = 5 - - -class FileType(proto.Enum): - r"""Definitions of file type groups to scan. New types will be - added to this list. - - Values: - FILE_TYPE_UNSPECIFIED (0): - Includes all files. - BINARY_FILE (1): - Includes all file extensions not covered by another entry. - Binary scanning attempts to convert the content of the file - to utf_8 to scan the file. If you wish to avoid this fall - back, specify one or more of the other FileType's in your - storage scan. 
- TEXT_FILE (2): - Included file extensions: - asc,asp, aspx, brf, c, cc,cfm, cgi, cpp, csv, - cxx, c++, cs, css, dart, dat, dot, eml,, - epbub, ged, go, h, hh, hpp, hxx, h++, hs, html, - htm, mkd, markdown, m, ml, mli, perl, pl, - plist, pm, php, phtml, pht, properties, py, - pyw, rb, rbw, rs, rss, rc, scala, sh, sql, - swift, tex, shtml, shtm, xhtml, lhs, ics, ini, - java, js, json, kix, kml, ocaml, md, txt, - text, tsv, vb, vcard, vcs, wml, xcodeproj, xml, - xsl, xsd, yml, yaml. - IMAGE (3): - Included file extensions: bmp, gif, jpg, jpeg, jpe, png. - bytes_limit_per_file has no effect on image files. Image - inspection is restricted to 'global', 'us', 'asia', and - 'europe'. - WORD (5): - Word files >30 MB will be scanned as binary - files. Included file extensions: - docx, dotx, docm, dotm - PDF (6): - PDF files >30 MB will be scanned as binary - files. Included file extensions: - pdf - AVRO (7): - Included file extensions: - avro - CSV (8): - Included file extensions: - csv - TSV (9): - Included file extensions: - tsv - POWERPOINT (11): - Powerpoint files >30 MB will be scanned as - binary files. Included file extensions: - pptx, pptm, potx, potm, pot - EXCEL (12): - Excel files >30 MB will be scanned as binary - files. Included file extensions: - xlsx, xlsm, xltx, xltm - """ - FILE_TYPE_UNSPECIFIED = 0 - BINARY_FILE = 1 - TEXT_FILE = 2 - IMAGE = 3 - WORD = 5 - PDF = 6 - AVRO = 7 - CSV = 8 - TSV = 9 - POWERPOINT = 11 - EXCEL = 12 - - -class InfoType(proto.Message): - r"""Type of information detected by the API. - - Attributes: - name (str): - Name of the information type. Either a name of your choosing - when creating a CustomInfoType, or one of the names listed - at https://cloud.google.com/dlp/docs/infotypes-reference - when specifying a built-in type. When sending Cloud DLP - results to Data Catalog, infoType names should conform to - the pattern ``[A-Za-z0-9$_-]{1,64}``. - version (str): - Optional version name for this InfoType. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - version: str = proto.Field( - proto.STRING, - number=2, - ) - - -class SensitivityScore(proto.Message): - r"""Score is a summary of all elements in the data profile. - A higher number means more sensitive. - - Attributes: - score (google.cloud.dlp_v2.types.SensitivityScore.SensitivityScoreLevel): - The score applied to the resource. - """ - class SensitivityScoreLevel(proto.Enum): - r"""Various score levels for resources. - - Values: - SENSITIVITY_SCORE_UNSPECIFIED (0): - Unused. - SENSITIVITY_LOW (10): - No sensitive information detected. Limited - access. - SENSITIVITY_MODERATE (20): - Medium risk - PII, potentially sensitive - data, or fields with free-text data that are at - higher risk of having intermittent sensitive - data. Consider limiting access. - SENSITIVITY_HIGH (30): - High risk – SPII may be present. Exfiltration - of data may lead to user data loss. - Re-identification of users may be possible. - Consider limiting usage and or removing SPII. - """ - SENSITIVITY_SCORE_UNSPECIFIED = 0 - SENSITIVITY_LOW = 10 - SENSITIVITY_MODERATE = 20 - SENSITIVITY_HIGH = 30 - - score: SensitivityScoreLevel = proto.Field( - proto.ENUM, - number=1, - enum=SensitivityScoreLevel, - ) - - -class StoredType(proto.Message): - r"""A reference to a StoredInfoType to use with scanning. - - Attributes: - name (str): - Resource name of the requested ``StoredInfoType``, for - example - ``organizations/433245324/storedInfoTypes/432452342`` or - ``projects/project-id/storedInfoTypes/432452342``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp indicating when the version of the - ``StoredInfoType`` used for inspection was created. - Output-only field, populated by the system. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class CustomInfoType(proto.Message): - r"""Custom information type provided by the user. Used to find - domain-specific sensitive information configurable to the data - in question. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - CustomInfoType can either be a new infoType, or an extension - of built-in infoType, when the name matches one of existing - infoTypes and that infoType is specified in - ``InspectContent.info_types`` field. Specifying the latter - adds findings to the one detected by the system. If built-in - info type is not specified in ``InspectContent.info_types`` - list then the name is treated as a custom info type. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Likelihood to return for this CustomInfoType. This base - value can be altered by a detection rule if the finding - meets the criteria specified by the rule. Defaults to - ``VERY_LIKELY`` if not specified. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - A list of phrases to detect as a - CustomInfoType. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression based CustomInfoType. - - This field is a member of `oneof`_ ``type``. - surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): - Message for detecting output from - deidentification transformations that support - reversing. - - This field is a member of `oneof`_ ``type``. 
- stored_type (google.cloud.dlp_v2.types.StoredType): - Load an existing ``StoredInfoType`` resource for use in - ``InspectDataSource``. Not currently supported in - ``InspectContent``. - - This field is a member of `oneof`_ ``type``. - detection_rules (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): - Set of detection rules to apply to all findings of this - CustomInfoType. Rules are applied in order that they are - specified. Not supported for the ``surrogate_type`` - CustomInfoType. - exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): - If set to EXCLUSION_TYPE_EXCLUDE this infoType will not - cause a finding to be returned. It still can be used for - rules matching. - """ - class ExclusionType(proto.Enum): - r""" - - Values: - EXCLUSION_TYPE_UNSPECIFIED (0): - A finding of this custom info type will not - be excluded from results. - EXCLUSION_TYPE_EXCLUDE (1): - A finding of this custom info type will be - excluded from final results, but can still - affect rule execution. - """ - EXCLUSION_TYPE_UNSPECIFIED = 0 - EXCLUSION_TYPE_EXCLUDE = 1 - - class Dictionary(proto.Message): - r"""Custom information type based on a dictionary of words or phrases. - This can be used to match sensitive information specific to the - data, such as a list of employee IDs or job titles. - - Dictionary words are case-insensitive and all characters other than - letters and digits in the unicode `Basic Multilingual - Plane `__ - will be replaced with whitespace when scanning for matches, so the - dictionary phrase "Sam Johnson" will match all three phrases "sam - johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the - characters surrounding any match must be of a different type than - the adjacent characters within the word, so letters must be next to - non-letters and digits next to non-digits. 
For example, the - dictionary word "jen" will match the first three letters of the text - "jen123" but will return no matches for "jennifer". - - Dictionary words containing a large number of characters that are - not letters or digits may result in unexpected findings because such - characters are treated as whitespace. The - `limits `__ page contains - details about the size limits of dictionaries. For dictionaries that - do not fit within these constraints, consider using - ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): - List of words or phrases to search for. - - This field is a member of `oneof`_ ``source``. - cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): - Newline-delimited file of words in Cloud - Storage. Only a single file is accepted. - - This field is a member of `oneof`_ ``source``. - """ - - class WordList(proto.Message): - r"""Message defining a list of words or phrases to search for in - the data. - - Attributes: - words (MutableSequence[str]): - Words or phrases defining the dictionary. The dictionary - must contain at least one phrase and every phrase must - contain at least 2 characters that are letters or digits. 
- [required] - """ - - words: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - word_list: 'CustomInfoType.Dictionary.WordList' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='CustomInfoType.Dictionary.WordList', - ) - cloud_storage_path: 'CloudStoragePath' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='CloudStoragePath', - ) - - class Regex(proto.Message): - r"""Message defining a custom regular expression. - - Attributes: - pattern (str): - Pattern defining the regular expression. Its - syntax - (https://github.com/google/re2/wiki/Syntax) can - be found under the google/re2 repository on - GitHub. - group_indexes (MutableSequence[int]): - The index of the submatch to extract as - findings. When not specified, the entire match - is returned. No more than 3 may be included. - """ - - pattern: str = proto.Field( - proto.STRING, - number=1, - ) - group_indexes: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=2, - ) - - class SurrogateType(proto.Message): - r"""Message for detecting output from deidentification transformations - such as - ```CryptoReplaceFfxFpeConfig`` `__. - These types of transformations are those that perform - pseudonymization, thereby producing a "surrogate" as output. This - should be used in conjunction with a field on the transformation - such as ``surrogate_info_type``. This CustomInfoType does not - support the use of ``detection_rules``. - - """ - - class DetectionRule(proto.Message): - r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a - ``CustomInfoType`` to alter behavior under certain circumstances, - depending on the specific details of the rule. Not supported for the - ``surrogate_type`` custom infoType. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - - This field is a member of `oneof`_ ``type``. - """ - - class Proximity(proto.Message): - r"""Message for specifying a window around a finding to apply a - detection rule. - - Attributes: - window_before (int): - Number of characters before the finding to consider. For - tabular data, if you want to modify the likelihood of an - entire column of findngs, set this to 1. For more - information, see [Hotword example: Set the match likelihood - of a table column] - (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). - window_after (int): - Number of characters after the finding to - consider. - """ - - window_before: int = proto.Field( - proto.INT32, - number=1, - ) - window_after: int = proto.Field( - proto.INT32, - number=2, - ) - - class LikelihoodAdjustment(proto.Message): - r"""Message for specifying an adjustment to the likelihood of a - finding as part of a detection rule. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - fixed_likelihood (google.cloud.dlp_v2.types.Likelihood): - Set the likelihood of a finding to a fixed - value. - - This field is a member of `oneof`_ ``adjustment``. - relative_likelihood (int): - Increase or decrease the likelihood by the specified number - of levels. For example, if a finding would be ``POSSIBLE`` - without the detection rule and ``relative_likelihood`` is 1, - then it is upgraded to ``LIKELY``, while a value of -1 would - downgrade it to ``UNLIKELY``. 
Likelihood may never drop - below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so - applying an adjustment of 1 followed by an adjustment of -1 - when base likelihood is ``VERY_LIKELY`` will result in a - final likelihood of ``LIKELY``. - - This field is a member of `oneof`_ ``adjustment``. - """ - - fixed_likelihood: 'Likelihood' = proto.Field( - proto.ENUM, - number=1, - oneof='adjustment', - enum='Likelihood', - ) - relative_likelihood: int = proto.Field( - proto.INT32, - number=2, - oneof='adjustment', - ) - - class HotwordRule(proto.Message): - r"""The rule that adjusts the likelihood of findings within a - certain proximity of hotwords. - - Attributes: - hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression pattern defining what - qualifies as a hotword. - proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): - Range of characters within which the entire hotword must - reside. The total length of the window cannot exceed 1000 - characters. The finding itself will be included in the - window, so that hotwords can be used to match substrings of - the finding itself. Suppose you want Cloud DLP to promote - the likelihood of the phone number regex "(\d{3}) - \\d{3}-\d{4}" if the area code is known to be the area code - of a company's office. In this case, use the hotword regex - "(xxx)", where "xxx" is the area code in question. - - For tabular data, if you want to modify the likelihood of an - entire column of findngs, see [Hotword example: Set the - match likelihood of a table column] - (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). - likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment): - Likelihood adjustment to apply to all - matching findings. 
- """ - - hotword_regex: 'CustomInfoType.Regex' = proto.Field( - proto.MESSAGE, - number=1, - message='CustomInfoType.Regex', - ) - proximity: 'CustomInfoType.DetectionRule.Proximity' = proto.Field( - proto.MESSAGE, - number=2, - message='CustomInfoType.DetectionRule.Proximity', - ) - likelihood_adjustment: 'CustomInfoType.DetectionRule.LikelihoodAdjustment' = proto.Field( - proto.MESSAGE, - number=3, - message='CustomInfoType.DetectionRule.LikelihoodAdjustment', - ) - - hotword_rule: 'CustomInfoType.DetectionRule.HotwordRule' = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='CustomInfoType.DetectionRule.HotwordRule', - ) - - info_type: 'InfoType' = proto.Field( - proto.MESSAGE, - number=1, - message='InfoType', - ) - likelihood: 'Likelihood' = proto.Field( - proto.ENUM, - number=6, - enum='Likelihood', - ) - dictionary: Dictionary = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=Dictionary, - ) - regex: Regex = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=Regex, - ) - surrogate_type: SurrogateType = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=SurrogateType, - ) - stored_type: 'StoredType' = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message='StoredType', - ) - detection_rules: MutableSequence[DetectionRule] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=DetectionRule, - ) - exclusion_type: ExclusionType = proto.Field( - proto.ENUM, - number=8, - enum=ExclusionType, - ) - - -class FieldId(proto.Message): - r"""General identifier of a data field in a storage service. - - Attributes: - name (str): - Name describing the field. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class PartitionId(proto.Message): - r"""Datastore partition ID. - A partition ID identifies a grouping of entities. The grouping - is always by project and namespace, however the namespace ID may - be empty. 
- A partition ID contains several dimensions: - project ID and namespace ID. - - Attributes: - project_id (str): - The ID of the project to which the entities - belong. - namespace_id (str): - If not empty, the ID of the namespace to - which the entities belong. - """ - - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - namespace_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class KindExpression(proto.Message): - r"""A representation of a Datastore kind. - - Attributes: - name (str): - The name of the kind. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DatastoreOptions(proto.Message): - r"""Options defining a data set within Google Cloud Datastore. - - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - A partition ID identifies a grouping of - entities. The grouping is always by project and - namespace, however the namespace ID may be - empty. - kind (google.cloud.dlp_v2.types.KindExpression): - The kind to process. - """ - - partition_id: 'PartitionId' = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - kind: 'KindExpression' = proto.Field( - proto.MESSAGE, - number=2, - message='KindExpression', - ) - - -class CloudStorageRegexFileSet(proto.Message): - r"""Message representing a set of files in a Cloud Storage bucket. - Regular expressions are used to allow fine-grained control over - which files in the bucket to include. - - Included files are those that match at least one item in - ``include_regex`` and do not match any items in ``exclude_regex``. - Note that a file that matches items from both lists will *not* be - included. For a match to occur, the entire file path (i.e., - everything in the url after the bucket name) must match the regular - expression. 
- - For example, given the input - ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: - - - ``gs://mybucket/directory1/myfile`` will be included - - ``gs://mybucket/directory1/directory2/myfile`` will be included - (``.*`` matches across ``/``) - - ``gs://mybucket/directory0/directory1/myfile`` will *not* be - included (the full path doesn't match any items in - ``include_regex``) - - ``gs://mybucket/directory1/excludedfile`` will *not* be included - (the path matches an item in ``exclude_regex``) - - If ``include_regex`` is left empty, it will match all files by - default (this is equivalent to setting ``include_regex: [".*"]``). - - Some other common use cases: - - - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will - include all files in ``mybucket`` except for .pdf files - - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` - will include all files directly under - ``gs://mybucket/directory/``, without matching across ``/`` - - Attributes: - bucket_name (str): - The name of a Cloud Storage bucket. Required. - include_regex (MutableSequence[str]): - A list of regular expressions matching file paths to - include. All files in the bucket that match at least one of - these regular expressions will be included in the set of - files, except for those that also match an item in - ``exclude_regex``. Leaving this field empty will match all - files by default (this is equivalent to including ``.*`` in - the list). - - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. - exclude_regex (MutableSequence[str]): - A list of regular expressions matching file paths to - exclude. All files in the bucket that match at least one of - these regular expressions will be excluded from the scan. - - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. 
- """ - - bucket_name: str = proto.Field( - proto.STRING, - number=1, - ) - include_regex: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - exclude_regex: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CloudStorageOptions(proto.Message): - r"""Options defining a file or a set of files within a Cloud - Storage bucket. - - Attributes: - file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): - The set of one or more files to scan. - bytes_limit_per_file (int): - Max number of bytes to scan from a file. If a scanned file's - size is bigger than this value then the rest of the bytes - are omitted. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. Cannot be set - if de-identification is requested. - bytes_limit_per_file_percent (int): - Max percentage of bytes to scan from a file. The rest are - omitted. The number of bytes scanned is rounded down. Must - be between 0 and 100, inclusively. Both 0 and 100 means no - limit. Defaults to 0. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. Cannot be set - if de-identification is requested. - file_types (MutableSequence[google.cloud.dlp_v2.types.FileType]): - List of file type groups to include in the scan. If empty, - all files are scanned and available data format processors - are applied. In addition, the binary content of the selected - files is always scanned as well. Images are scanned only as - binary if the specified region does not support image - inspection and no file_types were specified. Image - inspection is restricted to 'global', 'us', 'asia', and - 'europe'. - sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): - - files_limit_percent (int): - Limits the number of files to scan to this - percentage of the input FileSet. Number of files - scanned is rounded down. Must be between 0 and - 100, inclusively. Both 0 and 100 means no limit. 
- Defaults to 0. - """ - class SampleMethod(proto.Enum): - r"""How to sample bytes if not all bytes are scanned. Meaningful only - when used in conjunction with bytes_limit_per_file. If not - specified, scanning would start from the top. - - Values: - SAMPLE_METHOD_UNSPECIFIED (0): - No description available. - TOP (1): - Scan from the top (default). - RANDOM_START (2): - For each file larger than bytes_limit_per_file, randomly - pick the offset to start scanning. The scanned bytes are - contiguous. - """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - class FileSet(proto.Message): - r"""Set of files to scan. - - Attributes: - url (str): - The Cloud Storage url of the file(s) to scan, in the format - ``gs:///``. Trailing wildcard in the path is - allowed. - - If the url ends in a trailing slash, the bucket or directory - represented by the url will be scanned non-recursively - (content in sub-directories will not be scanned). This means - that ``gs://mybucket/`` is equivalent to - ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is - equivalent to ``gs://mybucket/directory/*``. - - Exactly one of ``url`` or ``regex_file_set`` must be set. - regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): - The regex-filtered set of files to scan. Exactly one of - ``url`` or ``regex_file_set`` must be set. 
- """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - regex_file_set: 'CloudStorageRegexFileSet' = proto.Field( - proto.MESSAGE, - number=2, - message='CloudStorageRegexFileSet', - ) - - file_set: FileSet = proto.Field( - proto.MESSAGE, - number=1, - message=FileSet, - ) - bytes_limit_per_file: int = proto.Field( - proto.INT64, - number=4, - ) - bytes_limit_per_file_percent: int = proto.Field( - proto.INT32, - number=8, - ) - file_types: MutableSequence['FileType'] = proto.RepeatedField( - proto.ENUM, - number=5, - enum='FileType', - ) - sample_method: SampleMethod = proto.Field( - proto.ENUM, - number=6, - enum=SampleMethod, - ) - files_limit_percent: int = proto.Field( - proto.INT32, - number=7, - ) - - -class CloudStorageFileSet(proto.Message): - r"""Message representing a set of files in Cloud Storage. - - Attributes: - url (str): - The url, in the format ``gs:///``. Trailing - wildcard in the path is allowed. - """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CloudStoragePath(proto.Message): - r"""Message representing a single file or path in Cloud Storage. - - Attributes: - path (str): - A url representing a file or path (no wildcards) in Cloud - Storage. Example: gs://[BUCKET_NAME]/dictionary.txt - """ - - path: str = proto.Field( - proto.STRING, - number=1, - ) - - -class BigQueryOptions(proto.Message): - r"""Options defining BigQuery table and row identifiers. - - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Table fields that may uniquely identify a row within the - table. When ``actions.saveFindings.outputConfig.table`` is - specified, the values of columns specified here are - available in the output table under - ``location.content_locations.record_location.record_key.id_values``. - Nested fields such as ``person.birthdate.year`` are allowed. 
- rows_limit (int): - Max number of rows to scan. If the table has more rows than - this value, the rest of the rows are omitted. If not set, or - if set to 0, all rows will be scanned. Only one of - rows_limit and rows_limit_percent can be specified. Cannot - be used in conjunction with TimespanConfig. - rows_limit_percent (int): - Max percentage of rows to scan. The rest are omitted. The - number of rows scanned is rounded down. Must be between 0 - and 100, inclusively. Both 0 and 100 means no limit. - Defaults to 0. Only one of rows_limit and rows_limit_percent - can be specified. Cannot be used in conjunction with - TimespanConfig. - sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): - - excluded_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - References to fields excluded from scanning. - This allows you to skip inspection of entire - columns which you know have no findings. - included_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Limit scanning only to these fields. - """ - class SampleMethod(proto.Enum): - r"""How to sample rows if not all rows are scanned. Meaningful only when - used in conjunction with either rows_limit or rows_limit_percent. If - not specified, rows are scanned in the order BigQuery reads them. - - Values: - SAMPLE_METHOD_UNSPECIFIED (0): - No description available. - TOP (1): - Scan groups of rows in the order BigQuery - provides (default). Multiple groups of rows may - be scanned in parallel, so results may not - appear in the same order the rows are read. - RANDOM_START (2): - Randomly pick groups of rows to scan. 
- """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - table_reference: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='FieldId', - ) - rows_limit: int = proto.Field( - proto.INT64, - number=3, - ) - rows_limit_percent: int = proto.Field( - proto.INT32, - number=6, - ) - sample_method: SampleMethod = proto.Field( - proto.ENUM, - number=4, - enum=SampleMethod, - ) - excluded_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldId', - ) - included_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='FieldId', - ) - - -class StorageConfig(proto.Message): - r"""Shared message indicating Cloud storage type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): - Google Cloud Datastore options. - - This field is a member of `oneof`_ ``type``. - cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): - Cloud Storage options. - - This field is a member of `oneof`_ ``type``. - big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): - BigQuery options. - - This field is a member of `oneof`_ ``type``. - hybrid_options (google.cloud.dlp_v2.types.HybridOptions): - Hybrid inspection options. - - This field is a member of `oneof`_ ``type``. - timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): - - """ - - class TimespanConfig(proto.Message): - r"""Configuration of the timespan of the items to include in - scanning. 
Currently only supported when inspecting Cloud Storage - and BigQuery. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows older than - this value. If not set, no lower time limit is - applied. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows newer than - this value. If not set, no upper time limit is - applied. - timestamp_field (google.cloud.dlp_v2.types.FieldId): - Specification of the field containing the timestamp of - scanned items. Used for data sources like Datastore and - BigQuery. - - For BigQuery - - If this value is not specified and the table was modified - between the given start and end times, the entire table will - be scanned. If this value is specified, then rows are - filtered based on the given start and end times. Rows with a - ``NULL`` value in the provided BigQuery column are skipped. - Valid data types of the provided BigQuery column are: - ``INTEGER``, ``DATE``, ``TIMESTAMP``, and ``DATETIME``. - - If your BigQuery table is `partitioned at ingestion - time `__, - you can use any of the following pseudo-columns as your - timestamp field. When used with Cloud DLP, these - pseudo-column names are case sensitive. - - .. raw:: html - -
    -
  • _PARTITIONTIME
  • -
  • _PARTITIONDATE
  • -
  • _PARTITION_LOAD_TIME
  • -
- - For Datastore - - If this value is specified, then entities are filtered based - on the given start and end times. If an entity does not - contain the provided timestamp property or contains empty or - invalid values, then it is included. Valid data types of the - provided timestamp property are: ``TIMESTAMP``. - - See the `known - issue `__ - related to this operation. - enable_auto_population_of_timespan_config (bool): - When the job is started by a JobTrigger we will - automatically figure out a valid start_time to avoid - scanning files that have not been modified since the last - time the JobTrigger executed. This will be based on the time - of the execution of the last run of the JobTrigger or the - timespan end_time used in the last run of the JobTrigger. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - timestamp_field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=3, - message='FieldId', - ) - enable_auto_population_of_timespan_config: bool = proto.Field( - proto.BOOL, - number=4, - ) - - datastore_options: 'DatastoreOptions' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreOptions', - ) - cloud_storage_options: 'CloudStorageOptions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='CloudStorageOptions', - ) - big_query_options: 'BigQueryOptions' = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message='BigQueryOptions', - ) - hybrid_options: 'HybridOptions' = proto.Field( - proto.MESSAGE, - number=9, - oneof='type', - message='HybridOptions', - ) - timespan_config: TimespanConfig = proto.Field( - proto.MESSAGE, - number=6, - message=TimespanConfig, - ) - - -class HybridOptions(proto.Message): - r"""Configuration to control jobs where the content being - inspected is outside of Google 
Cloud Platform. - - Attributes: - description (str): - A short description of where the data is - coming from. Will be stored once in the job. 256 - max length. - required_finding_label_keys (MutableSequence[str]): - These are labels that each inspection request must include - within their 'finding_labels' map. Request may contain - others, but any missing one of these will be rejected. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - No more than 10 keys can be required. - labels (MutableMapping[str, str]): - To organize findings, these labels will be added to each - finding. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional - information to make findings meaningful such as - the columns that are primary keys. - """ - - description: str = proto.Field( - proto.STRING, - number=1, - ) - required_finding_label_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - table_options: 'TableOptions' = proto.Field( - proto.MESSAGE, - number=4, - message='TableOptions', - ) - - -class BigQueryKey(proto.Message): - r"""Row key for identifying a record in BigQuery table. - - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - row_number (int): - Row number inferred at the time the table was scanned. 
This - value is nondeterministic, cannot be queried, and may be - null for inspection jobs. To locate findings within a table, - specify - ``inspect_job.storage_config.big_query_options.identifying_fields`` - in ``CreateDlpJobRequest``. - """ - - table_reference: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - row_number: int = proto.Field( - proto.INT64, - number=2, - ) - - -class DatastoreKey(proto.Message): - r"""Record key for a finding in Cloud Datastore. - - Attributes: - entity_key (google.cloud.dlp_v2.types.Key): - Datastore entity key. - """ - - entity_key: 'Key' = proto.Field( - proto.MESSAGE, - number=1, - message='Key', - ) - - -class Key(proto.Message): - r"""A unique identifier for a Datastore entity. - If a key's partition ID or any of its path kinds or names are - reserved/read-only, the key is reserved/read-only. - A reserved/read-only key is forbidden in certain documented - contexts. - - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - Entities are partitioned into subsets, - currently identified by a project ID and - namespace ID. Queries are scoped to a single - partition. - path (MutableSequence[google.cloud.dlp_v2.types.Key.PathElement]): - The entity path. An entity path consists of one or more - elements composed of a kind and a string or numerical - identifier, which identify entities. The first element - identifies a *root entity*, the second element identifies a - *child* of the root entity, the third element identifies a - child of the second entity, and so forth. The entities - identified by all prefixes of the path are called the - element's *ancestors*. - - A path can never be empty, and a path can have at most 100 - elements. - """ - - class PathElement(proto.Message): - r"""A (kind, ID/name) pair used to construct a key path. - If either name or ID is set, the element is complete. If neither - is set, the element is incomplete. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - kind (str): - The kind of the entity. A kind matching regex ``__.*__`` is - reserved/read-only. A kind must not contain more than 1500 - bytes when UTF-8 encoded. Cannot be ``""``. - id (int): - The auto-allocated ID of the entity. - Never equal to zero. Values less than zero are - discouraged and may not be supported in the - future. - - This field is a member of `oneof`_ ``id_type``. - name (str): - The name of the entity. A name matching regex ``__.*__`` is - reserved/read-only. A name must not be more than 1500 bytes - when UTF-8 encoded. Cannot be ``""``. - - This field is a member of `oneof`_ ``id_type``. - """ - - kind: str = proto.Field( - proto.STRING, - number=1, - ) - id: int = proto.Field( - proto.INT64, - number=2, - oneof='id_type', - ) - name: str = proto.Field( - proto.STRING, - number=3, - oneof='id_type', - ) - - partition_id: 'PartitionId' = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - path: MutableSequence[PathElement] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=PathElement, - ) - - -class RecordKey(proto.Message): - r"""Message for a unique key indicating a record that contains a - finding. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - datastore_key (google.cloud.dlp_v2.types.DatastoreKey): - - This field is a member of `oneof`_ ``type``. 
- big_query_key (google.cloud.dlp_v2.types.BigQueryKey): - - This field is a member of `oneof`_ ``type``. - id_values (MutableSequence[str]): - Values of identifying columns in the given row. Order of - values matches the order of ``identifying_fields`` specified - in the scanning request. - """ - - datastore_key: 'DatastoreKey' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreKey', - ) - big_query_key: 'BigQueryKey' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='BigQueryKey', - ) - id_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class BigQueryTable(proto.Message): - r"""Message defining the location of a BigQuery table. A table is - uniquely identified by its project_id, dataset_id, and table_name. - Within a query a table is often referenced with a string in the - format of: ``:.`` or - ``..``. - - Attributes: - project_id (str): - The Google Cloud Platform project ID of the - project containing the table. If omitted, - project ID is inferred from the API call. - dataset_id (str): - Dataset ID of the table. - table_id (str): - Name of the table. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=2, - ) - table_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BigQueryField(proto.Message): - r"""Message defining a field of a BigQuery table. - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Source table of the field. - field (google.cloud.dlp_v2.types.FieldId): - Designated field in the BigQuery table. - """ - - table: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=2, - message='FieldId', - ) - - -class EntityId(proto.Message): - r"""An entity in a dataset is a field or set of fields that correspond - to a single person. 
For example, in medical records the ``EntityId`` - might be a patient identifier, or for financial records it might be - an account identifier. This message is used when generalizations or - analysis must take into account that multiple rows correspond to the - same entity. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Composite key indicating which field contains - the entity identifier. - """ - - field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -class TableOptions(proto.Message): - r"""Instructions regarding the table content being inspected. - - Attributes: - identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - The columns that are the primary keys for - table objects included in ContentItem. A copy of - this cell's value will stored alongside - alongside each finding so that the finding can - be traced to the specific row it came from. No - more than 3 may be provided. - """ - - identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py deleted file mode 100644 index 6b1462df..00000000 --- a/owl-bot-staging/v2/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/dlp_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py deleted file mode 100644 index e4371abf..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ActivateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ActivateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.activate_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ActivateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py deleted file mode 100644 index c0b4fac1..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ActivateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ActivateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.activate_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ActivateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py deleted file mode 100644 index d8190299..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CancelDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_CancelDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py deleted file mode 100644 index 7475d6fa..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CancelDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_CancelDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py deleted file mode 100644 index 81ad2519..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for CreateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py deleted file mode 100644 index b394f634..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py deleted file mode 100644 index 28770717..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py deleted file mode 100644 index 779754f6..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py deleted file mode 100644 index aeb40676..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py deleted file mode 100644 index 0e344b36..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py deleted file mode 100644 index 3e82b8f3..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = await client.create_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py deleted file mode 100644 index ebb74284..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file 
except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = client.create_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py deleted file mode 100644 index cae6db89..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py deleted file mode 100644 index d59a301d..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py deleted file mode 100644 index 4903b032..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeidentifyContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = await client.deidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_DeidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py deleted file mode 100644 index 2422616c..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeidentifyContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = client.deidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_DeidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py deleted file mode 100644 index f544f12d..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_deidentify_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py deleted file mode 100644 index a33f3b26..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_deidentify_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py deleted file mode 100644 index 8737125b..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py deleted file mode 100644 index bb0ce9df..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for DeleteDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - client.delete_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py deleted file mode 100644 index f0aec8eb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_inspect_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py deleted file mode 100644 index c908d867..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_inspect_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py deleted file mode 100644 index 3784ee3e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_trigger(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py deleted file mode 100644 index 9f4405da..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - client.delete_job_trigger(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py deleted file mode 100644 index 652d88ff..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - await client.delete_stored_info_type(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py deleted file mode 100644 index 7e37ce36..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - client.delete_stored_info_type(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py deleted file mode 100644 index 869504da..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinishDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_FinishDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.finish_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_FinishDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py deleted file mode 100644 index 1b694f90..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinishDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_FinishDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - client.finish_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_FinishDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py deleted file mode 100644 index fc1570d3..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for GetDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py deleted file mode 100644 index bb1e1986..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py deleted file mode 100644 index 2065aa85..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py deleted file mode 100644 index 13959bde..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. 
DO NOT EDIT! -# -# Snippet for GetDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py deleted file mode 100644 index 1a9c9649..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py deleted file mode 100644 index 112e3d83..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py deleted file mode 100644 index 248184c7..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py deleted file mode 100644 index 9c6cdb3a..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py deleted file mode 100644 index a7820fe2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py deleted file mode 100644 index d0b0a44c..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py deleted file mode 100644 index e9f9be5a..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py deleted file mode 100644 index 2bfd7fe1..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py deleted file mode 100644 index dbdd91c2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py deleted file mode 100644 index a9c4c85e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the 
License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py deleted file mode 100644 index 3f24588b..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for InspectContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_InspectContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = await client.inspect_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_InspectContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py deleted file mode 100644 index 4b5a10f3..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for InspectContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_InspectContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = client.inspect_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_InspectContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py deleted file mode 100644 index d1a40dc0..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeidentifyTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py deleted file mode 100644 index 6a01f0fb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeidentifyTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py deleted file mode 100644 index 57c790d8..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDlpJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDlpJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDlpJobs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py deleted file mode 100644 index 7d06c237..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDlpJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDlpJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDlpJobs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py deleted file mode 100644 index 16b871f8..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInfoTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = await client.list_info_types(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ListInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py deleted file mode 100644 index 9e3ca167..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInfoTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = client.list_info_types(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ListInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py deleted file mode 100644 index 6e405a4f..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInspectTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInspectTemplates_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListInspectTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py deleted file mode 100644 index 71673677..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInspectTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInspectTemplates_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListInspectTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py deleted file mode 100644 index e8c0281f..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListJobTriggers_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListJobTriggers_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py deleted file mode 100644 index 0f9141c0..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListJobTriggers_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListJobTriggers_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py deleted file mode 100644 index 460c99c4..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListStoredInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py deleted file mode 100644 index 1ad1796e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListStoredInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py deleted file mode 100644 index a7a0d502..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RedactImage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_RedactImage_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = await client.redact_image(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_RedactImage_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py deleted file mode 100644 index 272bdb80..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RedactImage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_RedactImage_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = client.redact_image(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_RedactImage_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py deleted file mode 100644 index 401f62df..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ReidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ReidentifyContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.reidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ReidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py deleted file mode 100644 index 9e654be9..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ReidentifyContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = client.reidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ReidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py deleted file mode 100644 index 8b32186c..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py deleted file mode 100644 index e3296531..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py deleted file mode 100644 index 8e062116..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py deleted file mode 100644 index 332c5de6..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py deleted file mode 100644 index 58baaeeb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.update_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py deleted file mode 100644 index 3694b5ff..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.update_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py deleted file mode 100644 index d5658d32..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.update_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py deleted file mode 100644 index 9471180b..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.update_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json deleted file mode 100644 index 956f9eab..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ /dev/null @@ -1,5503 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.privacy.dlp.v2", - "version": "v2" - } - ], - "language": "PYTHON", - "name": "google-cloud-dlp", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.activate_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ActivateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "activate_job_trigger" - }, - "description": "Sample for ActivateJobTrigger", - "file": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.activate_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ActivateJobTrigger" - }, - "parameters": [ - { 
- "name": "request", - "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "activate_job_trigger" - }, - "description": "Sample for ActivateJobTrigger", - "file": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.cancel_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CancelDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_dlp_job" - }, - "description": "Sample for CancelDlpJob", - "file": 
"dlp_v2_generated_dlp_service_cancel_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.cancel_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CancelDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_dlp_job" - }, - "description": "Sample for CancelDlpJob", - "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "create_deidentify_template" - }, - "description": "Sample for CreateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - 
"shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "create_deidentify_template" - }, - "description": "Sample for CreateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", - 
"service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_job", - "type": "google.cloud.dlp_v2.types.InspectJobConfig" - }, - { - "name": "risk_job", - "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "create_dlp_job" - }, - "description": "Sample for CreateDlpJob", - "file": "dlp_v2_generated_dlp_service_create_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" - }, - { - "name": 
"parent", - "type": "str" - }, - { - "name": "inspect_job", - "type": "google.cloud.dlp_v2.types.InspectJobConfig" - }, - { - "name": "risk_job", - "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "create_dlp_job" - }, - "description": "Sample for CreateDlpJob", - "file": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - 
}, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "create_inspect_template" - }, - "description": "Sample for CreateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_create_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "create_inspect_template" - }, - "description": 
"Sample for CreateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "create_job_trigger" - }, - "description": "Sample for CreateJobTrigger", - "file": "dlp_v2_generated_dlp_service_create_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_async", - "segments": [ - { - "end": 55, - 
"start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "create_job_trigger" - }, - "description": "Sample for CreateJobTrigger", - "file": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 
56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "create_stored_info_type" - }, - "description": "Sample for CreateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "create_stored_info_type" - }, - "description": "Sample for CreateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.deidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", 
- "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", - "shortName": "deidentify_content" - }, - "description": "Sample for DeidentifyContent", - "file": "dlp_v2_generated_dlp_service_deidentify_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_deidentify_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.deidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", - "shortName": "deidentify_content" - }, - "description": "Sample for DeidentifyContent", - "file": "dlp_v2_generated_dlp_service_deidentify_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_deidentify_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deidentify_template" - }, - "description": "Sample for DeleteDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deidentify_template" - }, - "description": "Sample for DeleteDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dlp_job" - }, - "description": "Sample for DeleteDlpJob", - "file": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - 
"shortName": "DlpService" - }, - "shortName": "DeleteDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dlp_job" - }, - "description": "Sample for DeleteDlpJob", - "file": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_inspect_template" - }, - 
"description": "Sample for DeleteInspectTemplate", - "file": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_inspect_template" - }, - "description": "Sample for DeleteInspectTemplate", - "file": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job_trigger" - }, - "description": "Sample for DeleteJobTrigger", - "file": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - 
"fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job_trigger" - }, - "description": "Sample for DeleteJobTrigger", - "file": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" - }, - { - "name": "name", - "type": 
"str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_stored_info_type" - }, - "description": "Sample for DeleteStoredInfoType", - "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_stored_info_type" - }, - "description": "Sample for DeleteStoredInfoType", - "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dlp_v2_generated_DlpService_DeleteStoredInfoType_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.finish_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "FinishDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "finish_dlp_job" - }, - "description": "Sample for FinishDlpJob", - "file": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.finish_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "FinishDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "finish_dlp_job" - }, - "description": "Sample for FinishDlpJob", - "file": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "get_deidentify_template" - }, - "description": "Sample for GetDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "get_deidentify_template" - }, - "description": "Sample for GetDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "get_dlp_job" - }, - "description": "Sample for GetDlpJob", - "file": "dlp_v2_generated_dlp_service_get_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - 
{ - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "get_dlp_job" - }, - "description": "Sample for GetDlpJob", - "file": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - 
"client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "get_inspect_template" - }, - "description": "Sample for GetInspectTemplate", - "file": "dlp_v2_generated_dlp_service_get_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - 
}, - "shortName": "GetInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "get_inspect_template" - }, - "description": "Sample for GetInspectTemplate", - "file": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "get_job_trigger" - }, - "description": "Sample for GetJobTrigger", - "file": "dlp_v2_generated_dlp_service_get_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "get_job_trigger" - }, - "description": "Sample for GetJobTrigger", - "file": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": 
"FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "get_stored_info_type" - }, - "description": "Sample for GetStoredInfoType", - "file": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "get_stored_info_type" - }, - "description": "Sample for GetStoredInfoType", - "file": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_dlp_job", - "method": { - "fullName": 
"google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_dlp_job" - }, - "description": "Sample for HybridInspectDlpJob", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - 
{ - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_dlp_job" - }, - "description": "Sample for HybridInspectDlpJob", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_job_trigger" - }, - 
"description": "Sample for HybridInspectJobTrigger", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_job_trigger" - }, - "description": "Sample for HybridInspectJobTrigger", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": 
"FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.inspect_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "InspectContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.InspectContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", - "shortName": "inspect_content" - }, - "description": "Sample for InspectContent", - "file": "dlp_v2_generated_dlp_service_inspect_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_InspectContent_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dlp_v2_generated_dlp_service_inspect_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.inspect_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "InspectContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.InspectContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", - "shortName": "inspect_content" - }, - "description": "Sample for InspectContent", - "file": "dlp_v2_generated_dlp_service_inspect_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_InspectContent_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_inspect_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_deidentify_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", - "service": { - "fullName": 
"google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDeidentifyTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager", - "shortName": "list_deidentify_templates" - }, - "description": "Sample for ListDeidentifyTemplates", - "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_deidentify_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDeidentifyTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager", - "shortName": "list_deidentify_templates" - }, - "description": "Sample for ListDeidentifyTemplates", - "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_dlp_jobs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDlpJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager", - "shortName": "list_dlp_jobs" - }, - 
"description": "Sample for ListDlpJobs", - "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_dlp_jobs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDlpJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager", - "shortName": "list_dlp_jobs" - }, - "description": "Sample for ListDlpJobs", - "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", - "shortName": "list_info_types" - }, - "description": "Sample for ListInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_info_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_info_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - 
"fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", - "shortName": "list_info_types" - }, - "description": "Sample for ListInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_info_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_info_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_inspect_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": 
"ListInspectTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager", - "shortName": "list_inspect_templates" - }, - "description": "Sample for ListInspectTemplates", - "file": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_inspect_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInspectTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager", - "shortName": "list_inspect_templates" - }, - "description": "Sample for ListInspectTemplates", - "file": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_job_triggers", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListJobTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager", - "shortName": "list_job_triggers" - }, - "description": "Sample for ListJobTriggers", - "file": "dlp_v2_generated_dlp_service_list_job_triggers_async.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_job_triggers_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_job_triggers", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListJobTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager", - "shortName": "list_job_triggers" - }, - "description": "Sample for ListJobTriggers", - "file": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_stored_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListStoredInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager", - "shortName": "list_stored_info_types" - }, - "description": "Sample for ListStoredInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_stored_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListStoredInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager", - "shortName": "list_stored_info_types" - }, - "description": "Sample for ListStoredInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.redact_image", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", - "service": { - "fullName": 
"google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "RedactImage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.RedactImageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", - "shortName": "redact_image" - }, - "description": "Sample for RedactImage", - "file": "dlp_v2_generated_dlp_service_redact_image_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_RedactImage_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_redact_image_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.redact_image", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "RedactImage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.RedactImageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", - "shortName": 
"redact_image" - }, - "description": "Sample for RedactImage", - "file": "dlp_v2_generated_dlp_service_redact_image_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_RedactImage_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_redact_image_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.reidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ReidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", - "shortName": "reidentify_content" - }, - "description": "Sample for ReidentifyContent", - "file": "dlp_v2_generated_dlp_service_reidentify_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_reidentify_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.reidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ReidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", - "shortName": "reidentify_content" - }, - "description": "Sample for ReidentifyContent", - "file": "dlp_v2_generated_dlp_service_reidentify_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_reidentify_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "update_deidentify_template" - }, - "description": "Sample for UpdateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.update_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "update_deidentify_template" - }, - "description": "Sample for UpdateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_inspect_template", - "method": { - "fullName": 
"google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "update_inspect_template" - }, - "description": "Sample for UpdateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_update_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - 
"shortName": "UpdateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "update_inspect_template" - }, - "description": "Sample for UpdateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" - }, 
- { - "name": "name", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "update_job_trigger" - }, - "description": "Sample for UpdateJobTrigger", - "file": "dlp_v2_generated_dlp_service_update_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "update_job_trigger" - }, - "description": "Sample for UpdateJobTrigger", - "file": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "update_stored_info_type" - }, - "description": "Sample for UpdateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "update_stored_info_type" - }, - "description": "Sample for UpdateStoredInfoType", - "file": 
"dlp_v2_generated_dlp_service_update_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py" - } - ] -} diff --git a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py deleted file mode 100644 index 9adcd0d5..00000000 --- a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py +++ /dev/null @@ -1,209 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class dlpCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'activate_job_trigger': ('name', ), - 'cancel_dlp_job': ('name', ), - 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), - 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), - 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), - 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), - 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), - 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), - 'delete_deidentify_template': ('name', ), - 'delete_dlp_job': ('name', ), - 'delete_inspect_template': ('name', ), - 'delete_job_trigger': ('name', ), - 'delete_stored_info_type': ('name', ), - 'finish_dlp_job': ('name', ), - 'get_deidentify_template': ('name', ), - 'get_dlp_job': ('name', ), - 'get_inspect_template': ('name', ), - 'get_job_trigger': ('name', ), - 'get_stored_info_type': ('name', ), - 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), - 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), - 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), - 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 
'location_id', ), - 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), - 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), - 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'type_', 'location_id', ), - 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), - 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), - 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), - 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), - 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), - 'update_stored_info_type': ('name', 'config', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=dlpCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the dlp client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py deleted file mode 100644 index 2b4eb21b..00000000 --- a/owl-bot-staging/v2/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-dlp' - - -description = "Google Cloud Dlp API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/dlp/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-dlp" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: 
Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v2/testing/constraints-3.10.txt b/owl-bot-staging/v2/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.11.txt b/owl-bot-staging/v2/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.12.txt b/owl-bot-staging/v2/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adfe..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py deleted file mode 100644 index 94f19f6e..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ /dev/null @@ -1,17403 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient -from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.services.dlp_service import transports -from google.cloud.dlp_v2.types import dlp -from google.cloud.dlp_v2.types import storage -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf 
import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DlpServiceClient._get_default_mtls_endpoint(None) is None - assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DlpServiceClient, "grpc"), - (DlpServiceAsyncClient, "grpc_asyncio"), - (DlpServiceClient, "rest"), -]) -def test_dlp_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 
'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dlp.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DlpServiceGrpcTransport, "grpc"), - (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DlpServiceRestTransport, "rest"), -]) -def test_dlp_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DlpServiceClient, "grpc"), - (DlpServiceAsyncClient, "grpc_asyncio"), - (DlpServiceClient, "rest"), -]) -def test_dlp_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dlp.googleapis.com' - ) - - -def test_dlp_service_client_get_transport_class(): - transport = DlpServiceClient.get_transport_class() - available_transports = [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceRestTransport, - ] - assert transport in available_transports - - transport = DlpServiceClient.get_transport_class("grpc") - assert transport == transports.DlpServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -def test_dlp_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), - 
(DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DlpServiceClient, DlpServiceAsyncClient -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -def test_dlp_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), -]) -def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), -]) -def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_dlp_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DlpServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_dlp_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dlp.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dlp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.InspectContentRequest, - dict, -]) -def test_inspect_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectContentResponse( - ) - response = client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -def test_inspect_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - client.inspect_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - -@pytest.mark.asyncio -async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( - )) - response = await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -@pytest.mark.asyncio -async def test_inspect_content_async_from_dict(): - await test_inspect_content_async(request_type=dict) - - -def test_inspect_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.InspectContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = dlp.InspectContentResponse() - client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_inspect_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.InspectContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) - await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.RedactImageRequest, - dict, -]) -def test_redact_image(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - ) - response = client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -def test_redact_image_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - client.redact_image() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - -@pytest.mark.asyncio -async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - )) - response = await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -@pytest.mark.asyncio -async def test_redact_image_async_from_dict(): - await test_redact_image_async(request_type=dict) - - -def test_redact_image_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = dlp.RedactImageResponse() - client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_redact_image_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) - await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.DeidentifyContentRequest, - dict, -]) -def test_deidentify_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyContentResponse( - ) - response = client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -def test_deidentify_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - client.deidentify_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - -@pytest.mark.asyncio -async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( - )) - response = await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_deidentify_content_async_from_dict(): - await test_deidentify_content_async(request_type=dict) - - -def test_deidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = dlp.DeidentifyContentResponse() - client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_deidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) - await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.ReidentifyContentRequest, - dict, -]) -def test_reidentify_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ReidentifyContentResponse( - ) - response = client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -def test_reidentify_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - client.reidentify_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - -@pytest.mark.asyncio -async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( - )) - response = await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_reidentify_content_async_from_dict(): - await test_reidentify_content_async(request_type=dict) - - -def test_reidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ReidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = dlp.ReidentifyContentResponse() - client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_reidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ReidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) - await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInfoTypesRequest, - dict, -]) -def test_list_info_types(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse( - ) - response = client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -def test_list_info_types_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - client.list_info_types() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - -@pytest.mark.asyncio -async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( - )) - response = await client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -@pytest.mark.asyncio -async def test_list_info_types_async_from_dict(): - await test_list_info_types_async(request_type=dict) - - -def test_list_info_types_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_info_types_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_info_types_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_info_types_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateInspectTemplateRequest, - dict, -]) -def test_create_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - client.create_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_inspect_template_async_from_dict(): - await test_create_inspect_template_async(request_type=dict) - - -def test_create_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - - -def test_create_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateInspectTemplateRequest, - dict, -]) -def test_update_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - client.update_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_inspect_template_async_from_dict(): - await test_update_inspect_template_async(request_type=dict) - - -def test_update_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetInspectTemplateRequest, - dict, -]) -def test_get_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - client.get_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_inspect_template_async_from_dict(): - await test_get_inspect_template_async(request_type=dict) - - -def test_get_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.GetInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInspectTemplatesRequest, - dict, -]) -def test_list_inspect_templates(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_inspect_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - client.list_inspect_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - -@pytest.mark.asyncio -async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_from_dict(): - await test_list_inspect_templates_async(request_type=dict) - - -def test_list_inspect_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = dlp.ListInspectTemplatesResponse() - client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_inspect_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_inspect_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_inspect_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.ListInspectTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_inspect_templates_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_inspect_templates(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in results) -def test_list_inspect_templates_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_inspect_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_inspect_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_inspect_templates(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteInspectTemplateRequest, - dict, -]) -def test_delete_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - client.delete_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_inspect_template_async_from_dict(): - await test_delete_inspect_template_async(request_type=dict) - - -def test_delete_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = None - client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDeidentifyTemplateRequest, - dict, -]) -def test_create_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - client.create_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_deidentify_template_async_from_dict(): - await test_create_deidentify_template_async(request_type=dict) - - -def test_create_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - - -def test_create_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDeidentifyTemplateRequest, - dict, -]) -def test_update_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - client.update_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_deidentify_template_async_from_dict(): - await test_update_deidentify_template_async(request_type=dict) - - -def test_update_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDeidentifyTemplateRequest, - dict, -]) -def test_get_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - client.get_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_deidentify_template_async_from_dict(): - await test_get_deidentify_template_async(request_type=dict) - - -def test_get_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDeidentifyTemplatesRequest, - dict, -]) -def test_list_deidentify_templates(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_deidentify_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - client.list_deidentify_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_from_dict(): - await test_list_deidentify_templates_async(request_type=dict) - - -def test_list_deidentify_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = dlp.ListDeidentifyTemplatesResponse() - client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) - await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_deidentify_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_deidentify_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_deidentify_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_deidentify_templates_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_deidentify_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_deidentify_templates_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_deidentify_templates_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_deidentify_templates(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in results) -def test_list_deidentify_templates_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_deidentify_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_deidentify_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_deidentify_templates(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDeidentifyTemplateRequest, - dict, -]) -def test_delete_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - client.delete_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async_from_dict(): - await test_delete_deidentify_template_async(request_type=dict) - - -def test_delete_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = None - client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateJobTriggerRequest, - dict, -]) -def test_create_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_create_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - client.create_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - -@pytest.mark.asyncio -async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_create_job_trigger_async_from_dict(): - await test_create_job_trigger_async(request_type=dict) - - -def test_create_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - - -def test_create_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateJobTriggerRequest, - dict, -]) -def test_update_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_update_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - client.update_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - -@pytest.mark.asyncio -async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_update_job_trigger_async_from_dict(): - await test_update_job_trigger_async(request_type=dict) - - -def test_update_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectJobTriggerRequest, - dict, -]) -def test_hybrid_inspect_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse( - ) - response = client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - client.hybrid_inspect_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - response = await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async_from_dict(): - await test_hybrid_inspect_job_trigger_async(request_type=dict) - - -def test_hybrid_inspect_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_hybrid_inspect_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.hybrid_inspect_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_hybrid_inspect_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.HybridInspectResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.hybrid_inspect_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetJobTriggerRequest, - dict, -]) -def test_get_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_get_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - client.get_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - -@pytest.mark.asyncio -async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_get_job_trigger_async_from_dict(): - await test_get_job_trigger_async(request_type=dict) - - -def test_get_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListJobTriggersRequest, - dict, -]) -def test_list_job_triggers(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - ) - response = client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_triggers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - client.list_job_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - -@pytest.mark.asyncio -async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_job_triggers_async_from_dict(): - await test_list_job_triggers_async(request_type=dict) - - -def test_list_job_triggers_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListJobTriggersRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value = dlp.ListJobTriggersResponse() - client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_job_triggers_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListJobTriggersRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) - await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_job_triggers_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.ListJobTriggersResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_job_triggers( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_job_triggers_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_job_triggers_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_job_triggers( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_job_triggers_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - - -def test_list_job_triggers_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_job_triggers(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in results) -def test_list_job_triggers_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - pages = list(client.list_job_triggers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_job_triggers_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_job_triggers(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_job_triggers_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_job_triggers(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteJobTriggerRequest, - dict, -]) -def test_delete_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - client.delete_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - -@pytest.mark.asyncio -async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_trigger_async_from_dict(): - await test_delete_job_trigger_async(request_type=dict) - - -def test_delete_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = None - client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ActivateJobTriggerRequest, - dict, -]) -def test_activate_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_activate_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - client.activate_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - -@pytest.mark.asyncio -async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_activate_job_trigger_async_from_dict(): - await test_activate_job_trigger_async(request_type=dict) - - -def test_activate_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_activate_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDlpJobRequest, - dict, -]) -def test_create_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_create_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - client.create_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - -@pytest.mark.asyncio -async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_create_dlp_job_async_from_dict(): - await test_create_dlp_job_async(request_type=dict) - - -def test_create_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_dlp_job( - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) - - -def test_create_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - -@pytest.mark.asyncio -async def test_create_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_dlp_job( - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) - -@pytest.mark.asyncio -async def test_create_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDlpJobsRequest, - dict, -]) -def test_list_dlp_jobs(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_dlp_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - client.list_dlp_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDlpJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_from_dict(): - await test_list_dlp_jobs_async(request_type=dict) - - -def test_list_dlp_jobs_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDlpJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = dlp.ListDlpJobsResponse() - client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDlpJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) - await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_dlp_jobs_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_dlp_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_dlp_jobs_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_dlp_jobs_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_dlp_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_dlp_jobs_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - - -def test_list_dlp_jobs_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_dlp_jobs(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in results) -def test_list_dlp_jobs_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - pages = list(client.list_dlp_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_dlp_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_dlp_jobs(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.GetDlpJobRequest, - dict, -]) -def test_get_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_get_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - client.get_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - -@pytest.mark.asyncio -async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_get_dlp_job_async_from_dict(): - await test_get_dlp_job_async(request_type=dict) - - -def test_get_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDlpJobRequest, - dict, -]) -def test_delete_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - client.delete_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() - -@pytest.mark.asyncio -async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_dlp_job_async_from_dict(): - await test_delete_dlp_job_async(request_type=dict) - - -def test_delete_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value = None - client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CancelDlpJobRequest, - dict, -]) -def test_cancel_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - client.cancel_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() - -@pytest.mark.asyncio -async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_dlp_job_async_from_dict(): - await test_cancel_dlp_job_async(request_type=dict) - - -def test_cancel_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CancelDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value = None - client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_cancel_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CancelDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateStoredInfoTypeRequest, - dict, -]) -def test_create_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_create_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - client.create_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_stored_info_type_async_from_dict(): - await test_create_stored_info_type_async(request_type=dict) - - -def test_create_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateStoredInfoTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateStoredInfoTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_stored_info_type( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - - -def test_create_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - -@pytest.mark.asyncio -async def test_create_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_stored_info_type( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateStoredInfoTypeRequest, - dict, -]) -def test_update_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_update_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - client.update_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_update_stored_info_type_async_from_dict(): - await test_update_stored_info_type_async(request_type=dict) - - -def test_update_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_stored_info_type( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_stored_info_type( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetStoredInfoTypeRequest, - dict, -]) -def test_get_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_get_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - client.get_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_stored_info_type_async_from_dict(): - await test_get_stored_info_type_async(request_type=dict) - - -def test_get_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListStoredInfoTypesRequest, - dict, -]) -def test_list_stored_info_types(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_stored_info_types_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - client.list_stored_info_types() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() - -@pytest.mark.asyncio -async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_from_dict(): - await test_list_stored_info_types_async(request_type=dict) - - -def test_list_stored_info_types_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListStoredInfoTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value = dlp.ListStoredInfoTypesResponse() - client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_stored_info_types_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListStoredInfoTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) - await client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_stored_info_types_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_stored_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_stored_info_types_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_stored_info_types_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_stored_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_stored_info_types_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_stored_info_types_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_stored_info_types(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in results) -def test_list_stored_info_types_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - pages = list(client.list_stored_info_types(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_stored_info_types(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_stored_info_types(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteStoredInfoTypeRequest, - dict, -]) -def test_delete_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - client.delete_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_stored_info_type_async_from_dict(): - await test_delete_stored_info_type_async(request_type=dict) - - -def test_delete_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeleteStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value = None - client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectDlpJobRequest, - dict, -]) -def test_hybrid_inspect_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse( - ) - response = client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - client.hybrid_inspect_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - response = await client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async_from_dict(): - await test_hybrid_inspect_dlp_job_async(request_type=dict) - - -def test_hybrid_inspect_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.HybridInspectDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - await client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_hybrid_inspect_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.hybrid_inspect_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_hybrid_inspect_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.hybrid_inspect_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.FinishDlpJobRequest, - dict, -]) -def test_finish_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_finish_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - client.finish_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() - -@pytest.mark.asyncio -async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_finish_dlp_job_async_from_dict(): - await test_finish_dlp_job_async(request_type=dict) - - -def test_finish_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.FinishDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value = None - client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_finish_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.FinishDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.InspectContentRequest, - dict, -]) -def test_inspect_content_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.inspect_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_inspect_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_inspect_content") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectContentResponse.to_json(dlp.InspectContentResponse()) - - request = dlp.InspectContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectContentResponse() - - client.inspect_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
pre.assert_called_once() - post.assert_called_once() - - -def test_inspect_content_rest_bad_request(transport: str = 'rest', request_type=dlp.InspectContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.inspect_content(request) - - -def test_inspect_content_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.RedactImageRequest, - dict, -]) -def test_redact_image_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.RedactImageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.redact_image(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_redact_image_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_redact_image") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.RedactImageResponse.to_json(dlp.RedactImageResponse()) - - request = dlp.RedactImageRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value 
= request, metadata - post.return_value = dlp.RedactImageResponse() - - client.redact_image(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_redact_image_rest_bad_request(transport: str = 'rest', request_type=dlp.RedactImageRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.redact_image(request) - - -def test_redact_image_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeidentifyContentRequest, - dict, -]) -def test_deidentify_content_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.deidentify_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_deidentify_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_deidentify_content") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyContentResponse.to_json(dlp.DeidentifyContentResponse()) - - request = dlp.DeidentifyContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyContentResponse() - - client.deidentify_content(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_deidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.DeidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.deidentify_content(request) - - -def test_deidentify_content_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ReidentifyContentRequest, - dict, -]) -def test_reidentify_content_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ReidentifyContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.reidentify_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -def test_reidentify_content_rest_required_fields(request_type=dlp.ReidentifyContentRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = dlp.ReidentifyContentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.reidentify_content(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_reidentify_content_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.reidentify_content._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reidentify_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), 
"request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_reidentify_content") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ReidentifyContentResponse.to_json(dlp.ReidentifyContentResponse()) - - request = dlp.ReidentifyContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ReidentifyContentResponse() - - client.reidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_reidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.ReidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.reidentify_content(request) - - -def test_reidentify_content_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInfoTypesRequest, - dict, -]) -def test_list_info_types_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInfoTypesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_info_types(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.ListInfoTypesResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_info_types_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_info_types") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListInfoTypesResponse.to_json(dlp.ListInfoTypesResponse()) - - request = dlp.ListInfoTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListInfoTypesResponse() - - client.list_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInfoTypesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_info_types(request) - - -def test_list_info_types_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInfoTypesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_info_types(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/infoTypes" % client.transport._host, args[1]) - - -def test_list_info_types_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_info_types_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateInspectTemplateRequest, - dict, -]) -def test_create_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_inspect_template_rest_required_fields(request_type=dlp.CreateInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "inspectTemplate", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_inspect_template") as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = dlp.CreateInspectTemplateRequest.pb(dlp.CreateInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - - request = dlp.CreateInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - - client.create_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_inspect_template(request) - - -def test_create_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) - - -def test_create_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -def test_create_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateInspectTemplateRequest, - dict, -]) -def test_update_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_inspect_template_rest_required_fields(request_type=dlp.UpdateInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_inspect_template") as pre: - pre.assert_not_called() - post.assert_not_called() - 
pb_message = dlp.UpdateInspectTemplateRequest.pb(dlp.UpdateInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - - request = dlp.UpdateInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - - client.update_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_inspect_template(request) - - -def test_update_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_update_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetInspectTemplateRequest, - dict, -]) -def test_get_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_inspect_template_rest_required_fields(request_type=dlp.GetInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_inspect_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - - request = dlp.GetInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - - client.get_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_inspect_template(request) - - -def test_get_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_get_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -def test_get_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInspectTemplatesRequest, - dict, -]) -def test_list_inspect_templates_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_inspect_templates(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_inspect_templates_rest_required_fields(request_type=dlp.ListInspectTemplatesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_inspect_templates(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_inspect_templates_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_inspect_templates._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_inspect_templates_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_inspect_templates") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListInspectTemplatesRequest.pb(dlp.ListInspectTemplatesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListInspectTemplatesResponse.to_json(dlp.ListInspectTemplatesResponse()) - - request = dlp.ListInspectTemplatesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListInspectTemplatesResponse() - - client.list_inspect_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_inspect_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_inspect_templates(request) - - -def test_list_inspect_templates_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_inspect_templates(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) - - -def test_list_inspect_templates_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_inspect_templates_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_inspect_templates(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in results) - - pages = list(client.list_inspect_templates(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - 
- -@pytest.mark.parametrize("request_type", [ - dlp.DeleteInspectTemplateRequest, - dict, -]) -def test_delete_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_inspect_template_rest_required_fields(request_type=dlp.DeleteInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_inspect_template") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteInspectTemplateRequest.pb(dlp.DeleteInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - 
pre.return_value = request, metadata - - client.delete_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_inspect_template(request) - - -def test_delete_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_delete_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -def test_delete_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDeidentifyTemplateRequest, - dict, -]) -def test_create_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_deidentify_template_rest_required_fields(request_type=dlp.CreateDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default 
values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "deidentifyTemplate", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_create_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateDeidentifyTemplateRequest.pb(dlp.CreateDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - - request = dlp.CreateDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - - client.create_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_deidentify_template(request) - - -def test_create_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) - - -def test_create_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -def test_create_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDeidentifyTemplateRequest, - dict, -]) -def test_update_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_deidentify_template_rest_required_fields(request_type=dlp.UpdateDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), 
- interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.UpdateDeidentifyTemplateRequest.pb(dlp.UpdateDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - - request = dlp.UpdateDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - - client.update_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_deidentify_template(request) - - -def test_update_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_update_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDeidentifyTemplateRequest, - dict, -]) -def test_get_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_deidentify_template_rest_required_fields(request_type=dlp.GetDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else 
transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetDeidentifyTemplateRequest.pb(dlp.GetDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - - request = dlp.GetDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - - client.get_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_deidentify_template(request) - - -def test_get_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_get_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_get_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDeidentifyTemplatesRequest, - dict, -]) -def test_list_deidentify_templates_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_deidentify_templates(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_deidentify_templates_rest_required_fields(request_type=dlp.ListDeidentifyTemplatesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_deidentify_templates(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_deidentify_templates_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_deidentify_templates._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deidentify_templates_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as 
transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListDeidentifyTemplatesRequest.pb(dlp.ListDeidentifyTemplatesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListDeidentifyTemplatesResponse.to_json(dlp.ListDeidentifyTemplatesResponse()) - - request = dlp.ListDeidentifyTemplatesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListDeidentifyTemplatesResponse() - - client.list_deidentify_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_deidentify_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_deidentify_templates(request) - - -def test_list_deidentify_templates_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_deidentify_templates(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) - - -def test_list_deidentify_templates_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_deidentify_templates_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_deidentify_templates(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in results) - - pages = list(client.list_deidentify_templates(request=sample_request).pages) - for page_, token in zip(pages, 
['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDeidentifyTemplateRequest, - dict, -]) -def test_delete_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_deidentify_template(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_deidentify_template_rest_required_fields(request_type=dlp.DeleteDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteDeidentifyTemplateRequest.pb(dlp.DeleteDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", 
"squid"), - ] - pre.return_value = request, metadata - - client.delete_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_deidentify_template(request) - - -def test_delete_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_delete_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_delete_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateJobTriggerRequest, - dict, -]) -def test_create_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_job_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_create_job_trigger_rest_required_fields(request_type=dlp.CreateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "jobTrigger", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) - - request = dlp.CreateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - - client.create_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_job_trigger(request) - - -def test_create_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) - - -def test_create_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -def test_create_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateJobTriggerRequest, - dict, -]) -def test_update_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_update_job_trigger_rest_required_fields(request_type=dlp.UpdateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) - - request = dlp.UpdateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - - client.update_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_job_trigger(request) - - -def test_update_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_update_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectJobTriggerRequest, - dict, -]) -def test_hybrid_inspect_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.hybrid_inspect_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_job_trigger_rest_required_fields(request_type=dlp.HybridInspectJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.hybrid_inspect_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.HybridInspectJobTriggerRequest.pb(dlp.HybridInspectJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - 
"body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) - - request = dlp.HybridInspectJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.HybridInspectResponse() - - client.hybrid_inspect_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_hybrid_inspect_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.hybrid_inspect_job_trigger(request) - - -def test_hybrid_inspect_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.HybridInspectResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.hybrid_inspect_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" % client.transport._host, args[1]) - - -def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - - -def test_hybrid_inspect_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetJobTriggerRequest, - dict, -]) -def test_get_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) - - request = dlp.GetJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - - client.get_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.GetJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_job_trigger(request) - - -def test_get_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_get_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - - -def test_get_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListJobTriggersRequest, - dict, -]) -def test_list_job_triggers_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_job_triggers(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_triggers_rest_required_fields(request_type=dlp.ListJobTriggersRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListJobTriggersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_job_triggers(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_job_triggers_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_job_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_job_triggers_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_job_triggers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": 
pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListJobTriggersResponse.to_json(dlp.ListJobTriggersResponse()) - - request = dlp.ListJobTriggersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListJobTriggersResponse() - - client.list_job_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_job_triggers_rest_bad_request(transport: str = 'rest', request_type=dlp.ListJobTriggersRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_job_triggers(request) - - -def test_list_job_triggers_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListJobTriggersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_job_triggers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) - - -def test_list_job_triggers_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - - -def test_list_job_triggers_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_job_triggers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in results) - - pages = list(client.list_job_triggers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteJobTriggerRequest, - dict, -]) -def test_delete_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_job_trigger(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_trigger_rest_required_fields(request_type=dlp.DeleteJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_job_trigger") as pre: - pre.assert_not_called() - pb_message = 
dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_job_trigger(request) - - -def test_delete_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_delete_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - - -def test_delete_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ActivateJobTriggerRequest, - dict, -]) -def test_activate_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.activate_job_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_activate_job_trigger_rest_required_fields(request_type=dlp.ActivateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.activate_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_activate_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_activate_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_activate_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) - - request = dlp.ActivateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - - client.activate_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_activate_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.ActivateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.activate_job_trigger(request) - - -def test_activate_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDlpJobRequest, - dict, -]) -def test_create_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) - - request = dlp.CreateDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - - client.create_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_dlp_job(request) - - -def test_create_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DlpJob() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) - - -def test_create_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - -def test_create_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDlpJobsRequest, - dict, -]) -def test_list_dlp_jobs_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_dlp_jobs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListDlpJobsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_dlp_jobs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_dlp_jobs_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_dlp_jobs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_dlp_jobs_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListDlpJobsResponse.to_json(dlp.ListDlpJobsResponse()) - - request = dlp.ListDlpJobsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListDlpJobsResponse() - - client.list_dlp_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_dlp_jobs_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDlpJobsRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_dlp_jobs(request) - - -def test_list_dlp_jobs_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListDlpJobsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_dlp_jobs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) - - -def test_list_dlp_jobs_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - - -def test_list_dlp_jobs_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_dlp_jobs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in results) - - pages = list(client.list_dlp_jobs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDlpJobRequest, - dict, -]) -def test_get_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) - 
jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dlp_job_rest_interceptors(null_interceptor): - 
transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) - - request = dlp.GetDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - - client.get_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_dlp_job(request) - - -def test_get_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) - - -def test_get_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - - -def test_get_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDlpJobRequest, - dict, -]) -def test_delete_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_dlp_job(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_dlp_job(request) - - -def test_delete_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) - - -def test_delete_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - - -def test_delete_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CancelDlpJobRequest, - dict, -]) -def test_cancel_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.cancel_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_cancel_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.cancel_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.CancelDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.cancel_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_cancel_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CancelDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_dlp_job(request) - - -def test_cancel_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateStoredInfoTypeRequest, - dict, -]) -def test_create_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_stored_info_type(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_create_stored_info_type_rest_required_fields(request_type=dlp.CreateStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "config", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateStoredInfoTypeRequest.pb(dlp.CreateStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - - request = dlp.CreateStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - - client.create_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_stored_info_type(request) - - -def test_create_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) - - -def test_create_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - -def test_create_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateStoredInfoTypeRequest, - dict, -]) -def test_update_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_stored_info_type(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_update_stored_info_type_rest_required_fields(request_type=dlp.UpdateStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.UpdateStoredInfoTypeRequest.pb(dlp.UpdateStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - - request = dlp.UpdateStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - - client.update_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_stored_info_type(request) - - -def test_update_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_update_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetStoredInfoTypeRequest, - dict, -]) -def test_get_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_stored_info_type(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_get_stored_info_type_rest_required_fields(request_type=dlp.GetStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - - request = dlp.GetStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - - client.get_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.GetStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_stored_info_type(request) - - -def test_get_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_get_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - - -def test_get_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListStoredInfoTypesRequest, - dict, -]) -def test_list_stored_info_types_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_stored_info_types(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_stored_info_types_rest_required_fields(request_type=dlp.ListStoredInfoTypesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListStoredInfoTypesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_stored_info_types(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_stored_info_types_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_stored_info_types._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_stored_info_types_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_stored_info_types") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, 
- "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListStoredInfoTypesResponse.to_json(dlp.ListStoredInfoTypesResponse()) - - request = dlp.ListStoredInfoTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListStoredInfoTypesResponse() - - client.list_stored_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_stored_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListStoredInfoTypesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_stored_info_types(request) - - -def test_list_stored_info_types_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListStoredInfoTypesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_stored_info_types(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) - - -def test_list_stored_info_types_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_stored_info_types_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_stored_info_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in results) - - pages = list(client.list_stored_info_types(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteStoredInfoTypeRequest, - dict, -]) -def test_delete_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_stored_info_type(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_stored_info_type_rest_required_fields(request_type=dlp.DeleteStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type") as pre: - pre.assert_not_called() - pb_message = 
dlp.DeleteStoredInfoTypeRequest.pb(dlp.DeleteStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_stored_info_type(request) - - -def test_delete_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_delete_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - - -def test_delete_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectDlpJobRequest, - dict, -]) -def test_hybrid_inspect_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.hybrid_inspect_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_dlp_job_rest_required_fields(request_type=dlp.HybridInspectDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - 
- # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.hybrid_inspect_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = 
DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) - - request = dlp.HybridInspectDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.HybridInspectResponse() - - client.hybrid_inspect_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_hybrid_inspect_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.hybrid_inspect_dlp_job(request) - - -def test_hybrid_inspect_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.hybrid_inspect_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" % client.transport._host, args[1]) - - -def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - - -def test_hybrid_inspect_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.FinishDlpJobRequest, - dict, -]) -def test_finish_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.finish_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.finish_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_finish_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_finish_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_finish_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.FinishDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.finish_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_finish_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.FinishDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.finish_dlp_job(request) - - -def test_finish_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DlpServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DlpServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - transports.DlpServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = DlpServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DlpServiceGrpcTransport, - ) - -def test_dlp_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_dlp_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'inspect_content', - 'redact_image', - 'deidentify_content', - 'reidentify_content', - 'list_info_types', - 'create_inspect_template', - 'update_inspect_template', - 'get_inspect_template', - 'list_inspect_templates', - 'delete_inspect_template', - 'create_deidentify_template', - 'update_deidentify_template', - 'get_deidentify_template', - 'list_deidentify_templates', - 'delete_deidentify_template', - 'create_job_trigger', - 'update_job_trigger', - 'hybrid_inspect_job_trigger', - 'get_job_trigger', - 'list_job_triggers', - 'delete_job_trigger', - 'activate_job_trigger', - 'create_dlp_job', - 'list_dlp_jobs', - 'get_dlp_job', - 'delete_dlp_job', - 'cancel_dlp_job', - 'create_stored_info_type', - 'update_stored_info_type', - 'get_stored_info_type', - 'list_stored_info_types', - 'delete_stored_info_type', - 'hybrid_inspect_dlp_job', - 'finish_dlp_job', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_dlp_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DlpServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - 
-def test_dlp_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DlpServiceTransport() - adc.assert_called_once() - - -def test_dlp_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DlpServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - ], -) -def test_dlp_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - transports.DlpServiceRestTransport, - ], -) -def test_dlp_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DlpServiceGrpcTransport, grpc_helpers), - (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dlp.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dlp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_dlp_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DlpServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dlp_service_host_no_port(transport_name): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dlp.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dlp_service_host_with_port(transport_name): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dlp.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dlp.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def 
test_dlp_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DlpServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DlpServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.inspect_content._session - session2 = client2.transport.inspect_content._session - assert session1 != session2 - session1 = client1.transport.redact_image._session - session2 = client2.transport.redact_image._session - assert session1 != session2 - session1 = client1.transport.deidentify_content._session - session2 = client2.transport.deidentify_content._session - assert session1 != session2 - session1 = client1.transport.reidentify_content._session - session2 = client2.transport.reidentify_content._session - assert session1 != session2 - session1 = client1.transport.list_info_types._session - session2 = client2.transport.list_info_types._session - assert session1 != session2 - session1 = client1.transport.create_inspect_template._session - session2 = client2.transport.create_inspect_template._session - assert session1 != session2 - session1 = client1.transport.update_inspect_template._session - session2 = client2.transport.update_inspect_template._session - assert session1 != session2 - session1 = client1.transport.get_inspect_template._session - session2 = client2.transport.get_inspect_template._session - assert session1 != session2 - session1 = client1.transport.list_inspect_templates._session - session2 = client2.transport.list_inspect_templates._session - assert session1 != session2 - session1 = client1.transport.delete_inspect_template._session - session2 = client2.transport.delete_inspect_template._session - assert session1 != session2 - session1 = client1.transport.create_deidentify_template._session - session2 = client2.transport.create_deidentify_template._session - assert session1 != session2 - 
session1 = client1.transport.update_deidentify_template._session - session2 = client2.transport.update_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.get_deidentify_template._session - session2 = client2.transport.get_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.list_deidentify_templates._session - session2 = client2.transport.list_deidentify_templates._session - assert session1 != session2 - session1 = client1.transport.delete_deidentify_template._session - session2 = client2.transport.delete_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.create_job_trigger._session - session2 = client2.transport.create_job_trigger._session - assert session1 != session2 - session1 = client1.transport.update_job_trigger._session - session2 = client2.transport.update_job_trigger._session - assert session1 != session2 - session1 = client1.transport.hybrid_inspect_job_trigger._session - session2 = client2.transport.hybrid_inspect_job_trigger._session - assert session1 != session2 - session1 = client1.transport.get_job_trigger._session - session2 = client2.transport.get_job_trigger._session - assert session1 != session2 - session1 = client1.transport.list_job_triggers._session - session2 = client2.transport.list_job_triggers._session - assert session1 != session2 - session1 = client1.transport.delete_job_trigger._session - session2 = client2.transport.delete_job_trigger._session - assert session1 != session2 - session1 = client1.transport.activate_job_trigger._session - session2 = client2.transport.activate_job_trigger._session - assert session1 != session2 - session1 = client1.transport.create_dlp_job._session - session2 = client2.transport.create_dlp_job._session - assert session1 != session2 - session1 = client1.transport.list_dlp_jobs._session - session2 = client2.transport.list_dlp_jobs._session - assert session1 != session2 - session1 = 
client1.transport.get_dlp_job._session - session2 = client2.transport.get_dlp_job._session - assert session1 != session2 - session1 = client1.transport.delete_dlp_job._session - session2 = client2.transport.delete_dlp_job._session - assert session1 != session2 - session1 = client1.transport.cancel_dlp_job._session - session2 = client2.transport.cancel_dlp_job._session - assert session1 != session2 - session1 = client1.transport.create_stored_info_type._session - session2 = client2.transport.create_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.update_stored_info_type._session - session2 = client2.transport.update_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.get_stored_info_type._session - session2 = client2.transport.get_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.list_stored_info_types._session - session2 = client2.transport.list_stored_info_types._session - assert session1 != session2 - session1 = client1.transport.delete_stored_info_type._session - session2 = client2.transport.delete_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.hybrid_inspect_dlp_job._session - session2 = client2.transport.hybrid_inspect_dlp_job._session - assert session1 != session2 - session1 = client1.transport.finish_dlp_job._session - session2 = client2.transport.finish_dlp_job._session - assert session1 != session2 -def test_dlp_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.DlpServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_dlp_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DlpServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - 
credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_deidentify_template_path(): - organization = "squid" - deidentify_template = "clam" - expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) - 
assert expected == actual - - -def test_parse_deidentify_template_path(): - expected = { - "organization": "whelk", - "deidentify_template": "octopus", - } - path = DlpServiceClient.deidentify_template_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_deidentify_template_path(path) - assert expected == actual - -def test_dlp_content_path(): - project = "oyster" - expected = "projects/{project}/dlpContent".format(project=project, ) - actual = DlpServiceClient.dlp_content_path(project) - assert expected == actual - - -def test_parse_dlp_content_path(): - expected = { - "project": "nudibranch", - } - path = DlpServiceClient.dlp_content_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_dlp_content_path(path) - assert expected == actual - -def test_dlp_job_path(): - project = "cuttlefish" - dlp_job = "mussel" - expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - actual = DlpServiceClient.dlp_job_path(project, dlp_job) - assert expected == actual - - -def test_parse_dlp_job_path(): - expected = { - "project": "winkle", - "dlp_job": "nautilus", - } - path = DlpServiceClient.dlp_job_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_dlp_job_path(path) - assert expected == actual - -def test_finding_path(): - project = "scallop" - location = "abalone" - finding = "squid" - expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - actual = DlpServiceClient.finding_path(project, location, finding) - assert expected == actual - - -def test_parse_finding_path(): - expected = { - "project": "clam", - "location": "whelk", - "finding": "octopus", - } - path = DlpServiceClient.finding_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_finding_path(path) - assert expected == actual - -def test_inspect_template_path(): - organization = "oyster" - inspect_template = "nudibranch" - expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - actual = DlpServiceClient.inspect_template_path(organization, inspect_template) - assert expected == actual - - -def test_parse_inspect_template_path(): - expected = { - "organization": "cuttlefish", - "inspect_template": "mussel", - } - path = DlpServiceClient.inspect_template_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_inspect_template_path(path) - assert expected == actual - -def test_job_trigger_path(): - project = "winkle" - job_trigger = "nautilus" - expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - actual = DlpServiceClient.job_trigger_path(project, job_trigger) - assert expected == actual - - -def test_parse_job_trigger_path(): - expected = { - "project": "scallop", - "job_trigger": "abalone", - } - path = DlpServiceClient.job_trigger_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_job_trigger_path(path) - assert expected == actual - -def test_stored_info_type_path(): - organization = "squid" - stored_info_type = "clam" - expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) - assert expected == actual - - -def test_parse_stored_info_type_path(): - expected = { - "organization": "whelk", - "stored_info_type": "octopus", - } - path = DlpServiceClient.stored_info_type_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_stored_info_type_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DlpServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = DlpServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = DlpServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = DlpServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DlpServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = DlpServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = DlpServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = DlpServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DlpServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = DlpServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DlpServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/tests/unit/gapic/dlp_v2/test_dlp_service.py b/tests/unit/gapic/dlp_v2/test_dlp_service.py index 834c65e8..d7d5851c 100644 --- a/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ b/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -35,6 +37,7 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore @@ -45,6 +48,8 @@ from proto.marshal.rules import 
wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dlp_v2.services.dlp_service import ( DlpServiceAsyncClient, @@ -101,6 +106,7 @@ def test__get_default_mtls_endpoint(): [ (DlpServiceClient, "grpc"), (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), ], ) def test_dlp_service_client_from_service_account_info(client_class, transport_name): @@ -114,7 +120,11 @@ def test_dlp_service_client_from_service_account_info(client_class, transport_na assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dlp.googleapis.com:443") + assert client.transport._host == ( + "dlp.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dlp.googleapis.com" + ) @pytest.mark.parametrize( @@ -122,6 +132,7 @@ def test_dlp_service_client_from_service_account_info(client_class, transport_na [ (transports.DlpServiceGrpcTransport, "grpc"), (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DlpServiceRestTransport, "rest"), ], ) def test_dlp_service_client_service_account_always_use_jwt( @@ -147,6 +158,7 @@ def test_dlp_service_client_service_account_always_use_jwt( [ (DlpServiceClient, "grpc"), (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), ], ) def test_dlp_service_client_from_service_account_file(client_class, transport_name): @@ -167,13 +179,18 @@ def test_dlp_service_client_from_service_account_file(client_class, transport_na assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dlp.googleapis.com:443") + assert client.transport._host == ( + "dlp.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dlp.googleapis.com" + ) def test_dlp_service_client_get_transport_class(): transport 
= DlpServiceClient.get_transport_class() available_transports = [ transports.DlpServiceGrpcTransport, + transports.DlpServiceRestTransport, ] assert transport in available_transports @@ -190,6 +207,7 @@ def test_dlp_service_client_get_transport_class(): transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -333,6 +351,8 @@ def test_dlp_service_client_client_options( "grpc_asyncio", "false", ), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -526,6 +546,7 @@ def test_dlp_service_client_get_mtls_endpoint_and_cert_source(client_class): transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), ], ) def test_dlp_service_client_client_options_scopes( @@ -561,6 +582,7 @@ def test_dlp_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), ], ) def test_dlp_service_client_client_options_credentials_file( @@ -9409,161 +9431,8879 @@ async def test_finish_dlp_job_field_headers_async(): ) in kw["metadata"] -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DlpServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + dlp.InspectContentRequest, + dict, + ], +) +def test_inspect_content_rest(request_type): + client = DlpServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.DlpServiceGrpcTransport( + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.inspect_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_inspect_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_inspect_content" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_inspect_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectContentResponse.to_json( + dlp.InspectContentResponse() ) - # It is an error to provide an api_key and a transport instance. - transport = transports.DlpServiceGrpcTransport( + request = dlp.InspectContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectContentResponse() + + client.inspect_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_inspect_content_rest_bad_request( + transport: str = "rest", request_type=dlp.InspectContentRequest +): + client = DlpServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) - # It is an error to provide scopes and a transport instance. - transport = transports.DlpServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.inspect_content(request) + + +def test_inspect_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.RedactImageRequest, + dict, + ], +) +def test_redact_image_rest(request_type): + client = DlpServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.RedactImageResponse( + redacted_image=b"redacted_image_blob", + extracted_text="extracted_text_value", ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.RedactImageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DlpServiceGrpcTransport( + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.redact_image(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b"redacted_image_blob" + assert response.extracted_text == "extracted_text_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_redact_image_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), ) client = DlpServiceClient(transport=transport) - assert client.transport is transport + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_redact_image" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_redact_image" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.RedactImageResponse.to_json( + dlp.RedactImageResponse() + ) + request = dlp.RedactImageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.RedactImageResponse() + + client.redact_image( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.DlpServiceGrpcTransport( + pre.assert_called_once() + post.assert_called_once() + + +def test_redact_image_rest_bad_request( + transport: str = "rest", request_type=dlp.RedactImageRequest +): + client = DlpServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel - transport = transports.DlpServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.redact_image(request) + + +def test_redact_image_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - channel = transport.grpc_channel - assert channel @pytest.mark.parametrize( - "transport_class", + "request_type", [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, + dlp.DeidentifyContentRequest, + dict, ], ) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +def test_deidentify_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.deidentify_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_deidentify_content" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_deidentify_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyContentResponse.to_json( + dlp.DeidentifyContentResponse() + ) + + request = dlp.DeidentifyContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyContentResponse() + + client.deidentify_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_deidentify_content_rest_bad_request( + transport: str = "rest", request_type=dlp.DeidentifyContentRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deidentify_content(request) + + +def test_deidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + dlp.ReidentifyContentRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = DlpServiceClient.get_transport_class(transport_name)( +def test_reidentify_content_rest(request_type): + client = DlpServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ReidentifyContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reidentify_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_rest_required_fields( + request_type=dlp.ReidentifyContentRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = DlpServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.DlpServiceGrpcTransport, + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ReidentifyContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.reidentify_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reidentify_content_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.reidentify_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -def test_dlp_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_reidentify_content" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_reidentify_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ReidentifyContentResponse.to_json( + dlp.ReidentifyContentResponse() + ) -def test_dlp_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + request = dlp.ReidentifyContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ReidentifyContentResponse() + + client.reidentify_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "inspect_content", - "redact_image", - "deidentify_content", - "reidentify_content", - "list_info_types", - "create_inspect_template", - "update_inspect_template", - "get_inspect_template", - "list_inspect_templates", - "delete_inspect_template", - "create_deidentify_template", - "update_deidentify_template", - "get_deidentify_template", - "list_deidentify_templates", - "delete_deidentify_template", - "create_job_trigger", - "update_job_trigger", - "hybrid_inspect_job_trigger", + pre.assert_called_once() + post.assert_called_once() + + +def test_reidentify_content_rest_bad_request( + transport: str = "rest", request_type=dlp.ReidentifyContentRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reidentify_content(request) + + +def test_reidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListInfoTypesRequest, + dict, + ], +) +def test_list_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_info_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_info_types" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_info_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListInfoTypesResponse.to_json( + dlp.ListInfoTypesResponse() + ) + + request = dlp.ListInfoTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, 
metadata + post.return_value = dlp.ListInfoTypesResponse() + + client.list_info_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_info_types_rest_bad_request( + transport: str = "rest", request_type=dlp.ListInfoTypesRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_info_types(request) + + +def test_list_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/infoTypes" % client.transport._host, args[1] + ) + + +def test_list_info_types_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent="parent_value", + ) + + +def test_list_info_types_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateInspectTemplateRequest, + dict, + ], +) +def test_create_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_inspect_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_create_inspect_template_rest_required_fields( + request_type=dlp.CreateInspectTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default 
values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_inspect_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "inspectTemplate", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) 
+def test_create_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_inspect_template" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_create_inspect_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateInspectTemplateRequest.pb( + dlp.CreateInspectTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.CreateInspectTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.create_inspect_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_inspect_template_rest_bad_request( + transport: str = "rest", request_type=dlp.CreateInspectTemplateRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_inspect_template(request) + + +def test_create_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, + args[1], + ) + + +def test_create_inspect_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + +def test_create_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateInspectTemplateRequest, + dict, + ], +) +def test_update_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/inspectTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_inspect_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_update_inspect_template_rest_required_fields( + request_type=dlp.UpdateInspectTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_inspect_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None 
+ if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_update_inspect_template" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_update_inspect_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.UpdateInspectTemplateRequest.pb( + dlp.UpdateInspectTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.UpdateInspectTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.update_inspect_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_inspect_template_rest_bad_request( + transport: str = "rest", request_type=dlp.UpdateInspectTemplateRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/inspectTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_inspect_template(request) + + +def test_update_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/inspectTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, + args[1], + ) + + +def test_update_inspect_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name="name_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetInspectTemplateRequest, + dict, + ], +) +def test_get_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/inspectTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_inspect_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_get_inspect_template_rest_required_fields( + request_type=dlp.GetInspectTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_inspect_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_inspect_template" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_get_inspect_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.GetInspectTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.get_inspect_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_inspect_template_rest_bad_request( + transport: str = "rest", request_type=dlp.GetInspectTemplateRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/inspectTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_inspect_template(request) + + +def test_get_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/inspectTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, + args[1], + ) + + +def test_get_inspect_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name="name_value", + ) + + +def test_get_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListInspectTemplatesRequest, + dict, + ], +) +def test_list_inspect_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_inspect_templates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_inspect_templates_rest_required_fields( + request_type=dlp.ListInspectTemplatesRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_inspect_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_inspect_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "location_id", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_inspect_templates(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_inspect_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_inspect_templates._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "locationId", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_inspect_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_inspect_templates" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_inspect_templates" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListInspectTemplatesRequest.pb( + dlp.ListInspectTemplatesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListInspectTemplatesResponse.to_json( + dlp.ListInspectTemplatesResponse() + ) + + request = dlp.ListInspectTemplatesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInspectTemplatesResponse() + + client.list_inspect_templates( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_inspect_templates_rest_bad_request( + transport: str = "rest", request_type=dlp.ListInspectTemplatesRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_inspect_templates(request) + + +def test_list_inspect_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_inspect_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, + args[1], + ) + + +def test_list_inspect_templates_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent="parent_value", + ) + + +def test_list_inspect_templates_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token="abc", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token="def", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token="ghi", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_inspect_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.InspectTemplate) for i in results) + + pages = list(client.list_inspect_templates(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token 
+ + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteInspectTemplateRequest, + dict, + ], +) +def test_delete_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/inspectTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_inspect_template_rest_required_fields( + request_type=dlp.DeleteInspectTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_inspect_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_delete_inspect_template" + ) as pre: + pre.assert_not_called() + pb_message = dlp.DeleteInspectTemplateRequest.pb( + dlp.DeleteInspectTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteInspectTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + + client.delete_inspect_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_inspect_template_rest_bad_request( + transport: str = "rest", request_type=dlp.DeleteInspectTemplateRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/inspectTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_inspect_template(request) + + +def test_delete_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/inspectTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, + args[1], + ) + + +def test_delete_inspect_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name="name_value", + ) + + +def test_delete_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateDeidentifyTemplateRequest, + dict, + ], +) +def test_create_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_create_deidentify_template_rest_required_fields( + request_type=dlp.CreateDeidentifyTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_deidentify_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "deidentifyTemplate", + ) + ) + ) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_deidentify_template" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_create_deidentify_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateDeidentifyTemplateRequest.pb( + dlp.CreateDeidentifyTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json( + dlp.DeidentifyTemplate() + ) + + request = dlp.CreateDeidentifyTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.create_deidentify_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deidentify_template_rest_bad_request( + transport: str = "rest", request_type=dlp.CreateDeidentifyTemplateRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deidentify_template(request) + + +def test_create_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/deidentifyTemplates" + % client.transport._host, + args[1], + ) + + +def test_create_deidentify_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent="parent_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + ) + + +def test_create_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateDeidentifyTemplateRequest, + dict, + ], +) +def test_update_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_update_deidentify_template_rest_required_fields( + request_type=dlp.UpdateDeidentifyTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = 
DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_deidentify_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_update_deidentify_template" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_update_deidentify_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.UpdateDeidentifyTemplateRequest.pb( + dlp.UpdateDeidentifyTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json( + dlp.DeidentifyTemplate() + ) + + request = dlp.UpdateDeidentifyTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.update_deidentify_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_deidentify_template_rest_bad_request( + transport: str = "rest", request_type=dlp.UpdateDeidentifyTemplateRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deidentify_template(request) + + +def test_update_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/deidentifyTemplates/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deidentify_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name="name_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetDeidentifyTemplateRequest, + dict, + ], +) +def test_get_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_get_deidentify_template_rest_required_fields( + request_type=dlp.GetDeidentifyTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = 
DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_deidentify_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if 
null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_deidentify_template" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_get_deidentify_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetDeidentifyTemplateRequest.pb( + dlp.GetDeidentifyTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json( + dlp.DeidentifyTemplate() + ) + + request = dlp.GetDeidentifyTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.get_deidentify_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_deidentify_template_rest_bad_request( + transport: str = "rest", request_type=dlp.GetDeidentifyTemplateRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deidentify_template(request) + + +def test_get_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/deidentifyTemplates/*}" + % client.transport._host, + args[1], + ) + + +def test_get_deidentify_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name="name_value", + ) + + +def test_get_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListDeidentifyTemplatesRequest, + dict, + ], +) +def test_list_deidentify_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deidentify_templates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_deidentify_templates_rest_required_fields( + request_type=dlp.ListDeidentifyTemplatesRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deidentify_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deidentify_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "location_id", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_deidentify_templates(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_deidentify_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_deidentify_templates._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "locationId", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deidentify_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_deidentify_templates" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListDeidentifyTemplatesRequest.pb( + dlp.ListDeidentifyTemplatesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListDeidentifyTemplatesResponse.to_json( + dlp.ListDeidentifyTemplatesResponse() + ) + + request = dlp.ListDeidentifyTemplatesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDeidentifyTemplatesResponse() + + client.list_deidentify_templates( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_deidentify_templates_rest_bad_request( + transport: str = "rest", request_type=dlp.ListDeidentifyTemplatesRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deidentify_templates(request) + + +def test_list_deidentify_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deidentify_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/deidentifyTemplates" + % client.transport._host, + args[1], + ) + + +def test_list_deidentify_templates_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent="parent_value", + ) + + +def test_list_deidentify_templates_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token="abc", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token="def", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token="ghi", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_deidentify_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) for i in results) + + pages = list(client.list_deidentify_templates(request=sample_request).pages) + for page_, token in zip(pages, ["abc", 
"def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteDeidentifyTemplateRequest, + dict, + ], +) +def test_delete_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deidentify_template(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_deidentify_template_rest_required_fields( + request_type=dlp.DeleteDeidentifyTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deidentify_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template" + ) as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDeidentifyTemplateRequest.pb( + dlp.DeleteDeidentifyTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteDeidentifyTemplateRequest() + metadata = [ + ("key", "val"), 
+ ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_deidentify_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_deidentify_template_rest_bad_request( + transport: str = "rest", request_type=dlp.DeleteDeidentifyTemplateRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deidentify_template(request) + + +def test_delete_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/deidentifyTemplates/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deidentify_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name="name_value", + ) + + +def test_delete_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateJobTriggerRequest, + dict, + ], +) +def test_create_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_rest_required_fields( + request_type=dlp.CreateJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 
"parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "jobTrigger", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_create_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.CreateJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.create_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.CreateJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job_trigger(request) + + +def test_create_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1] + ) + + +def test_create_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + + +def test_create_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateJobTriggerRequest, + dict, + ], +) +def test_update_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_rest_required_fields( + request_type=dlp.UpdateJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_update_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_update_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.UpdateJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.update_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.UpdateJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job_trigger(request) + + +def test_update_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/jobTriggers/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1] + ) + + +def test_update_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.HybridInspectJobTriggerRequest, + dict, + ], +) +def test_hybrid_inspect_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobTriggers/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_rest_required_fields( + request_type=dlp.HybridInspectJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.hybrid_inspect_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectJobTriggerRequest.pb( + dlp.HybridInspectJobTriggerRequest() + ) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json( + dlp.HybridInspectResponse() + ) + + request = dlp.HybridInspectJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.HybridInspectJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobTriggers/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_job_trigger(request) + + +def test_hybrid_inspect_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/jobTriggers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.hybrid_inspect_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" + % client.transport._host, + args[1], + ) + + +def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name="name_value", + ) + + +def test_hybrid_inspect_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetJobTriggerRequest, + dict, + ], +) +def test_get_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_get_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.GetJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.get_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.GetJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_trigger(request) + + +def test_get_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/jobTriggers/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1] + ) + + +def test_get_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name="name_value", + ) + + +def test_get_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListJobTriggersRequest, + dict, + ], +) +def test_list_job_triggers_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_job_triggers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_job_triggers_rest_required_fields( + request_type=dlp.ListJobTriggersRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_job_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_job_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "location_id", + "order_by", + "page_size", + "page_token", + "type_", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_job_triggers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_job_triggers_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_job_triggers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "locationId", + "orderBy", + "pageSize", + "pageToken", + "type", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_job_triggers_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_job_triggers" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_job_triggers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListJobTriggersResponse.to_json( + dlp.ListJobTriggersResponse() + ) + + request = dlp.ListJobTriggersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListJobTriggersResponse() + + client.list_job_triggers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_job_triggers_rest_bad_request( + transport: str = "rest", request_type=dlp.ListJobTriggersRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_job_triggers(request) + + +def test_list_job_triggers_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListJobTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_job_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1] + ) + + +def test_list_job_triggers_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent="parent_value", + ) + + +def test_list_job_triggers_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token="def", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_job_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) for i in results) + + pages = list(client.list_job_triggers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteJobTriggerRequest, + dict, + ], +) +def test_delete_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_job_trigger(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_trigger_rest_required_fields( + request_type=dlp.DeleteJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_delete_job_trigger" + ) as pre: + pre.assert_not_called() + pb_message = 
dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.DeleteJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_job_trigger(request) + + +def test_delete_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/jobTriggers/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1] + ) + + +def test_delete_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name="name_value", + ) + + +def test_delete_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ActivateJobTriggerRequest, + dict, + ], +) +def test_activate_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + risk_details=dlp.AnalyzeDataSourceRiskDetails( + requested_privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.activate_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_activate_job_trigger_rest_required_fields( + request_type=dlp.ActivateJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.activate_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_activate_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_activate_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_activate_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_activate_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.ActivateJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.activate_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_activate_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.ActivateJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.activate_job_trigger(request) + + +def test_activate_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateDlpJobRequest, + dict, + ], +) +def test_create_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + risk_details=dlp.AnalyzeDataSourceRiskDetails( + requested_privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 
"parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_dlp_job" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_create_dlp_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.CreateDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.create_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.CreateDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dlp_job(request) + + +def test_create_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1] + ) + + +def test_create_dlp_job_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + +def test_create_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListDlpJobsRequest, + dict, + ], +) +def test_list_dlp_jobs_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_dlp_jobs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dlp_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dlp_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "location_id", + "order_by", + "page_size", + "page_token", + "type_", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_dlp_jobs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_dlp_jobs_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_dlp_jobs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "locationId", + "orderBy", + "pageSize", + "pageToken", + "type", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dlp_jobs_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_dlp_jobs" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListDlpJobsResponse.to_json( + dlp.ListDlpJobsResponse() + ) + + request = dlp.ListDlpJobsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDlpJobsResponse() + + client.list_dlp_jobs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_dlp_jobs_rest_bad_request( + transport: str = "rest", request_type=dlp.ListDlpJobsRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dlp_jobs(request) + + +def test_list_dlp_jobs_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_dlp_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1] + ) + + +def test_list_dlp_jobs_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent="parent_value", + ) + + +def test_list_dlp_jobs_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token="abc", + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token="def", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token="ghi", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_dlp_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) for i in results) + + pages = list(client.list_dlp_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetDlpJobRequest, + dict, + ], +) +def test_get_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + risk_details=dlp.AnalyzeDataSourceRiskDetails( + requested_privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dlp_job._get_unset_required_fields(jsonified_request) 
+ jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dlp_job_rest_interceptors(null_interceptor): 
+ transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_dlp_job" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_get_dlp_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.GetDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.get_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.GetDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dlp_job(request) + + +def test_get_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/dlpJobs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1] + ) + + +def test_get_dlp_job_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name="name_value", + ) + + +def test_get_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteDlpJobRequest, + dict, + ], +) +def test_delete_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_delete_dlp_job" + ) as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dlp_job( + 
request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.DeleteDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dlp_job(request) + + +def test_delete_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/dlpJobs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1] + ) + + +def test_delete_dlp_job_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name="name_value", + ) + + +def test_delete_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CancelDlpJobRequest, + dict, + ], +) +def test_cancel_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.cancel_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job" + ) as pre: + pre.assert_not_called() + pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.CancelDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, 
metadata + + client.cancel_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_cancel_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.CancelDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_dlp_job(request) + + +def test_cancel_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateStoredInfoTypeRequest, + dict, + ], +) +def test_create_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +def test_create_stored_info_type_rest_required_fields( + request_type=dlp.CreateStoredInfoTypeRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned 
response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_stored_info_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "config", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_stored_info_type" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_create_stored_info_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateStoredInfoTypeRequest.pb( + dlp.CreateStoredInfoTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.CreateStoredInfoTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.create_stored_info_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_stored_info_type_rest_bad_request( + transport: str = "rest", request_type=dlp.CreateStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_stored_info_type(request) + + +def test_create_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, + args[1], + ) + + +def test_create_stored_info_type_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + + +def test_create_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateStoredInfoTypeRequest, + dict, + ], +) +def test_update_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +def test_update_stored_info_type_rest_required_fields( + request_type=dlp.UpdateStoredInfoTypeRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_stored_info_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_update_stored_info_type" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_update_stored_info_type" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = dlp.UpdateStoredInfoTypeRequest.pb( + dlp.UpdateStoredInfoTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.UpdateStoredInfoTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.update_stored_info_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_stored_info_type_rest_bad_request( + transport: str = "rest", request_type=dlp.UpdateStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_stored_info_type(request) + + +def test_update_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/storedInfoTypes/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, + args[1], + ) + + +def test_update_stored_info_type_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name="name_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetStoredInfoTypeRequest, + dict, + ], +) +def test_get_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +def test_get_stored_info_type_rest_required_fields( + request_type=dlp.GetStoredInfoTypeRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_stored_info_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_stored_info_type" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_get_stored_info_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.GetStoredInfoTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.get_stored_info_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_stored_info_type_rest_bad_request( + transport: str = "rest", request_type=dlp.GetStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_stored_info_type(request) + + +def test_get_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/storedInfoTypes/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, + args[1], + ) + + +def test_get_stored_info_type_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name="name_value", + ) + + +def test_get_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListStoredInfoTypesRequest, + dict, + ], +) +def test_list_stored_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_stored_info_types(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_stored_info_types_rest_required_fields( + request_type=dlp.ListStoredInfoTypesRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_stored_info_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_stored_info_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "location_id", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_stored_info_types(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_stored_info_types_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_stored_info_types._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "locationId", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_stored_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_stored_info_types" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_stored_info_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListStoredInfoTypesResponse.to_json( + dlp.ListStoredInfoTypesResponse() + ) + + request = dlp.ListStoredInfoTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListStoredInfoTypesResponse() + + client.list_stored_info_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_stored_info_types_rest_bad_request( + transport: str = "rest", request_type=dlp.ListStoredInfoTypesRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_stored_info_types(request) + + +def test_list_stored_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_stored_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, + args[1], + ) + + +def test_list_stored_info_types_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent="parent_value", + ) + + +def test_list_stored_info_types_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token="abc", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token="def", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token="ghi", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_stored_info_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) for i in results) + + pages = list(client.list_stored_info_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + 
+@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteStoredInfoTypeRequest, + dict, + ], +) +def test_delete_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_stored_info_type_rest_required_fields( + request_type=dlp.DeleteStoredInfoTypeRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_stored_info_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type" + ) as pre: + pre.assert_not_called() + pb_message = dlp.DeleteStoredInfoTypeRequest.pb( + dlp.DeleteStoredInfoTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteStoredInfoTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), 
+ ] + pre.return_value = request, metadata + + client.delete_stored_info_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_stored_info_type_rest_bad_request( + transport: str = "rest", request_type=dlp.DeleteStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_stored_info_type(request) + + +def test_delete_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/storedInfoTypes/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, + args[1], + ) + + +def test_delete_stored_info_type_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name="name_value", + ) + + +def test_delete_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.HybridInspectDlpJobRequest, + dict, + ], +) +def test_hybrid_inspect_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_rest_required_fields( + request_type=dlp.HybridInspectDlpJobRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.hybrid_inspect_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + 
client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json( + dlp.HybridInspectResponse() + ) + + request = dlp.HybridInspectDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.HybridInspectDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_dlp_job(request) + + +def test_hybrid_inspect_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.hybrid_inspect_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" + % client.transport._host, + args[1], + ) + + +def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name="name_value", + ) + + +def test_hybrid_inspect_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.FinishDlpJobRequest, + dict, + ], +) +def test_finish_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.finish_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.finish_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_finish_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_finish_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_finish_dlp_job" + ) as pre: + pre.assert_not_called() + pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.FinishDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, 
metadata + + client.finish_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_finish_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.FinishDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.finish_dlp_job(request) + + +def test_finish_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DlpServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DlpServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DlpServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DlpServiceGrpcTransport, + ) + + +def test_dlp_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_dlp_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "inspect_content", + "redact_image", + "deidentify_content", + "reidentify_content", + "list_info_types", + "create_inspect_template", + "update_inspect_template", + "get_inspect_template", + "list_inspect_templates", + "delete_inspect_template", + "create_deidentify_template", + "update_deidentify_template", + "get_deidentify_template", + "list_deidentify_templates", + "delete_deidentify_template", + "create_job_trigger", + "update_job_trigger", + "hybrid_inspect_job_trigger", "get_job_trigger", "list_job_triggers", "delete_job_trigger", @@ -9666,6 +18406,7 @@ def test_dlp_service_transport_auth_adc(transport_class): [ transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, ], ) def test_dlp_service_transport_auth_gdch_credentials(transport_class): @@ -9760,11 +18501,23 @@ def test_dlp_service_grpc_transport_client_cert_source_for_mtls(transport_class) ) +def test_dlp_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DlpServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_dlp_service_host_no_port(transport_name): @@ -9773,7 +18526,11 @@ def test_dlp_service_host_no_port(transport_name): client_options=client_options.ClientOptions(api_endpoint="dlp.googleapis.com"), transport=transport_name, ) - assert client.transport._host == ("dlp.googleapis.com:443") + assert client.transport._host == ( + "dlp.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dlp.googleapis.com" + ) @pytest.mark.parametrize( @@ -9781,6 +18538,7 @@ def 
test_dlp_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_dlp_service_host_with_port(transport_name): @@ -9791,7 +18549,132 @@ def test_dlp_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dlp.googleapis.com:8000") + assert client.transport._host == ( + "dlp.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dlp.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_dlp_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DlpServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DlpServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.inspect_content._session + session2 = client2.transport.inspect_content._session + assert session1 != session2 + session1 = client1.transport.redact_image._session + session2 = client2.transport.redact_image._session + assert session1 != session2 + session1 = client1.transport.deidentify_content._session + session2 = client2.transport.deidentify_content._session + assert session1 != session2 + session1 = client1.transport.reidentify_content._session + session2 = client2.transport.reidentify_content._session + assert session1 != session2 + session1 = client1.transport.list_info_types._session + session2 = client2.transport.list_info_types._session + assert session1 != session2 + session1 = client1.transport.create_inspect_template._session + session2 = client2.transport.create_inspect_template._session + assert session1 != session2 + session1 = client1.transport.update_inspect_template._session + session2 = client2.transport.update_inspect_template._session + assert session1 != session2 + session1 = client1.transport.get_inspect_template._session + session2 = 
client2.transport.get_inspect_template._session + assert session1 != session2 + session1 = client1.transport.list_inspect_templates._session + session2 = client2.transport.list_inspect_templates._session + assert session1 != session2 + session1 = client1.transport.delete_inspect_template._session + session2 = client2.transport.delete_inspect_template._session + assert session1 != session2 + session1 = client1.transport.create_deidentify_template._session + session2 = client2.transport.create_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.update_deidentify_template._session + session2 = client2.transport.update_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.get_deidentify_template._session + session2 = client2.transport.get_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.list_deidentify_templates._session + session2 = client2.transport.list_deidentify_templates._session + assert session1 != session2 + session1 = client1.transport.delete_deidentify_template._session + session2 = client2.transport.delete_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.create_job_trigger._session + session2 = client2.transport.create_job_trigger._session + assert session1 != session2 + session1 = client1.transport.update_job_trigger._session + session2 = client2.transport.update_job_trigger._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_job_trigger._session + session2 = client2.transport.hybrid_inspect_job_trigger._session + assert session1 != session2 + session1 = client1.transport.get_job_trigger._session + session2 = client2.transport.get_job_trigger._session + assert session1 != session2 + session1 = client1.transport.list_job_triggers._session + session2 = client2.transport.list_job_triggers._session + assert session1 != session2 + session1 = 
client1.transport.delete_job_trigger._session + session2 = client2.transport.delete_job_trigger._session + assert session1 != session2 + session1 = client1.transport.activate_job_trigger._session + session2 = client2.transport.activate_job_trigger._session + assert session1 != session2 + session1 = client1.transport.create_dlp_job._session + session2 = client2.transport.create_dlp_job._session + assert session1 != session2 + session1 = client1.transport.list_dlp_jobs._session + session2 = client2.transport.list_dlp_jobs._session + assert session1 != session2 + session1 = client1.transport.get_dlp_job._session + session2 = client2.transport.get_dlp_job._session + assert session1 != session2 + session1 = client1.transport.delete_dlp_job._session + session2 = client2.transport.delete_dlp_job._session + assert session1 != session2 + session1 = client1.transport.cancel_dlp_job._session + session2 = client2.transport.cancel_dlp_job._session + assert session1 != session2 + session1 = client1.transport.create_stored_info_type._session + session2 = client2.transport.create_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.update_stored_info_type._session + session2 = client2.transport.update_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.get_stored_info_type._session + session2 = client2.transport.get_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.list_stored_info_types._session + session2 = client2.transport.list_stored_info_types._session + assert session1 != session2 + session1 = client1.transport.delete_stored_info_type._session + session2 = client2.transport.delete_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_dlp_job._session + session2 = client2.transport.hybrid_inspect_dlp_job._session + assert session1 != session2 + session1 = client1.transport.finish_dlp_job._session + session2 = 
client2.transport.finish_dlp_job._session + assert session1 != session2 def test_dlp_service_grpc_transport_channel(): @@ -10221,6 +19104,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -10238,6 +19122,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: From 465b5762d7e992cfdd92af51e55eaf491b568b23 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 16 Feb 2023 21:15:33 +0000 Subject: [PATCH 3/7] fix: Add service_yaml_parameters to py_gapic_library BUILD.bazel targets PiperOrigin-RevId: 510187992 Source-Link: https://github.com/googleapis/googleapis/commit/5edc23561778df80d5293f20132765f8757a6b2c Source-Link: https://github.com/googleapis/googleapis-gen/commit/b0bedb72e4765a3e0b674a28c50ea0f9a9b26a89 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjBiZWRiNzJlNDc2NWEzZTBiNjc0YTI4YzUwZWEwZjlhOWIyNmE4OSJ9 --- owl-bot-staging/v2/.coveragerc | 13 + owl-bot-staging/v2/.flake8 | 33 + owl-bot-staging/v2/MANIFEST.in | 2 + owl-bot-staging/v2/README.rst | 49 + owl-bot-staging/v2/docs/conf.py | 376 + .../v2/docs/dlp_v2/dlp_service.rst | 10 + owl-bot-staging/v2/docs/dlp_v2/services.rst | 6 + owl-bot-staging/v2/docs/dlp_v2/types.rst | 6 + owl-bot-staging/v2/docs/index.rst | 7 + .../v2/google/cloud/dlp/__init__.py | 395 + .../v2/google/cloud/dlp/gapic_version.py | 16 + owl-bot-staging/v2/google/cloud/dlp/py.typed | 2 + .../v2/google/cloud/dlp_v2/__init__.py | 396 + .../google/cloud/dlp_v2/gapic_metadata.json | 538 + .../v2/google/cloud/dlp_v2/gapic_version.py | 16 + .../v2/google/cloud/dlp_v2/py.typed | 2 + .../google/cloud/dlp_v2/services/__init__.py | 15 + .../dlp_v2/services/dlp_service/__init__.py | 22 + .../services/dlp_service/async_client.py | 4143 ++++ .../dlp_v2/services/dlp_service/client.py | 4269 ++++ .../dlp_v2/services/dlp_service/pagers.py | 623 + .../dlp_service/transports/__init__.py | 38 + 
.../services/dlp_service/transports/base.py | 752 + .../services/dlp_service/transports/grpc.py | 1262 ++ .../dlp_service/transports/grpc_asyncio.py | 1261 ++ .../services/dlp_service/transports/rest.py | 4325 ++++ .../v2/google/cloud/dlp_v2/types/__init__.py | 390 + .../v2/google/cloud/dlp_v2/types/dlp.py | 8846 ++++++++ .../v2/google/cloud/dlp_v2/types/storage.py | 1474 ++ owl-bot-staging/v2/mypy.ini | 3 + owl-bot-staging/v2/noxfile.py | 184 + ..._dlp_service_activate_job_trigger_async.py | 52 + ...d_dlp_service_activate_job_trigger_sync.py | 52 + ...erated_dlp_service_cancel_dlp_job_async.py | 50 + ...nerated_dlp_service_cancel_dlp_job_sync.py | 50 + ...ervice_create_deidentify_template_async.py | 52 + ...service_create_deidentify_template_sync.py | 52 + ...erated_dlp_service_create_dlp_job_async.py | 52 + ...nerated_dlp_service_create_dlp_job_sync.py | 52 + ...p_service_create_inspect_template_async.py | 52 + ...lp_service_create_inspect_template_sync.py | 52 + ...ed_dlp_service_create_job_trigger_async.py | 56 + ...ted_dlp_service_create_job_trigger_sync.py | 56 + ...p_service_create_stored_info_type_async.py | 52 + ...lp_service_create_stored_info_type_sync.py | 52 + ...ed_dlp_service_deidentify_content_async.py | 51 + ...ted_dlp_service_deidentify_content_sync.py | 51 + ...ervice_delete_deidentify_template_async.py | 50 + ...service_delete_deidentify_template_sync.py | 50 + ...erated_dlp_service_delete_dlp_job_async.py | 50 + ...nerated_dlp_service_delete_dlp_job_sync.py | 50 + ...p_service_delete_inspect_template_async.py | 50 + ...lp_service_delete_inspect_template_sync.py | 50 + ...ed_dlp_service_delete_job_trigger_async.py | 50 + ...ted_dlp_service_delete_job_trigger_sync.py | 50 + ...p_service_delete_stored_info_type_async.py | 50 + ...lp_service_delete_stored_info_type_sync.py | 50 + ...erated_dlp_service_finish_dlp_job_async.py | 50 + ...nerated_dlp_service_finish_dlp_job_sync.py | 50 + ...p_service_get_deidentify_template_async.py | 52 + 
...lp_service_get_deidentify_template_sync.py | 52 + ...generated_dlp_service_get_dlp_job_async.py | 52 + ..._generated_dlp_service_get_dlp_job_sync.py | 52 + ..._dlp_service_get_inspect_template_async.py | 52 + ...d_dlp_service_get_inspect_template_sync.py | 52 + ...rated_dlp_service_get_job_trigger_async.py | 52 + ...erated_dlp_service_get_job_trigger_sync.py | 52 + ..._dlp_service_get_stored_info_type_async.py | 52 + ...d_dlp_service_get_stored_info_type_sync.py | 52 + ...lp_service_hybrid_inspect_dlp_job_async.py | 52 + ...dlp_service_hybrid_inspect_dlp_job_sync.py | 52 + ...ervice_hybrid_inspect_job_trigger_async.py | 52 + ...service_hybrid_inspect_job_trigger_sync.py | 52 + ...rated_dlp_service_inspect_content_async.py | 51 + ...erated_dlp_service_inspect_content_sync.py | 51 + ...service_list_deidentify_templates_async.py | 53 + ..._service_list_deidentify_templates_sync.py | 53 + ...nerated_dlp_service_list_dlp_jobs_async.py | 53 + ...enerated_dlp_service_list_dlp_jobs_sync.py | 53 + ...rated_dlp_service_list_info_types_async.py | 51 + ...erated_dlp_service_list_info_types_sync.py | 51 + ...lp_service_list_inspect_templates_async.py | 53 + ...dlp_service_list_inspect_templates_sync.py | 53 + ...ted_dlp_service_list_job_triggers_async.py | 53 + ...ated_dlp_service_list_job_triggers_sync.py | 53 + ...lp_service_list_stored_info_types_async.py | 53 + ...dlp_service_list_stored_info_types_sync.py | 53 + ...enerated_dlp_service_redact_image_async.py | 51 + ...generated_dlp_service_redact_image_sync.py | 51 + ...ed_dlp_service_reidentify_content_async.py | 52 + ...ted_dlp_service_reidentify_content_sync.py | 52 + ...ervice_update_deidentify_template_async.py | 52 + ...service_update_deidentify_template_sync.py | 52 + ...p_service_update_inspect_template_async.py | 52 + ...lp_service_update_inspect_template_sync.py | 52 + ...ed_dlp_service_update_job_trigger_async.py | 52 + ...ted_dlp_service_update_job_trigger_sync.py | 52 + 
...p_service_update_stored_info_type_async.py | 52 + ...lp_service_update_stored_info_type_sync.py | 52 + ...nippet_metadata_google.privacy.dlp.v2.json | 5503 +++++ .../v2/scripts/fixup_dlp_v2_keywords.py | 209 + owl-bot-staging/v2/setup.py | 90 + .../v2/testing/constraints-3.10.txt | 6 + .../v2/testing/constraints-3.11.txt | 6 + .../v2/testing/constraints-3.12.txt | 6 + .../v2/testing/constraints-3.7.txt | 9 + .../v2/testing/constraints-3.8.txt | 6 + .../v2/testing/constraints-3.9.txt | 6 + owl-bot-staging/v2/tests/__init__.py | 16 + owl-bot-staging/v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + .../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 + .../unit/gapic/dlp_v2/test_dlp_service.py | 17404 ++++++++++++++++ 113 files changed, 56301 insertions(+) create mode 100644 owl-bot-staging/v2/.coveragerc create mode 100644 owl-bot-staging/v2/.flake8 create mode 100644 owl-bot-staging/v2/MANIFEST.in create mode 100644 owl-bot-staging/v2/README.rst create mode 100644 owl-bot-staging/v2/docs/conf.py create mode 100644 owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst create mode 100644 owl-bot-staging/v2/docs/dlp_v2/services.rst create mode 100644 owl-bot-staging/v2/docs/dlp_v2/types.rst create mode 100644 owl-bot-staging/v2/docs/index.rst create mode 100644 owl-bot-staging/v2/google/cloud/dlp/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py create mode 100644 
owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py create mode 100644 owl-bot-staging/v2/mypy.ini create mode 100644 owl-bot-staging/v2/noxfile.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json create mode 100644 owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py create mode 100644 owl-bot-staging/v2/setup.py create mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt create mode 100644 
owl-bot-staging/v2/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v2/tests/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc new file mode 100644 index 00000000..76798ec2 --- /dev/null +++ b/owl-bot-staging/v2/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dlp/__init__.py + google/cloud/dlp/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v2/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. 
+ **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in new file mode 100644 index 00000000..148f6bf3 --- /dev/null +++ b/owl-bot-staging/v2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/dlp *.py +recursive-include google/cloud/dlp_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst new file mode 100644 index 00000000..cf97c2e7 --- /dev/null +++ b/owl-bot-staging/v2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Dlp API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Dlp API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py new file mode 100644 index 00000000..cf2f570a --- /dev/null +++ b/owl-bot-staging/v2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-dlp documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-dlp" +copyright = u"2022, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dlp-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dlp.tex", + u"google-cloud-dlp Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dlp", + u"Google Cloud Dlp Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dlp", + u"google-cloud-dlp Documentation", + author, + "google-cloud-dlp", + "GAPIC library for Google Cloud Dlp API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst new file mode 100644 index 00000000..914da512 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst @@ -0,0 +1,10 @@ +DlpService +---------------------------- + +.. automodule:: google.cloud.dlp_v2.services.dlp_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v2/docs/dlp_v2/services.rst b/owl-bot-staging/v2/docs/dlp_v2/services.rst new file mode 100644 index 00000000..864a8c83 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Dlp v2 API +==================================== +.. 
toctree:: + :maxdepth: 2 + + dlp_service diff --git a/owl-bot-staging/v2/docs/dlp_v2/types.rst b/owl-bot-staging/v2/docs/dlp_v2/types.rst new file mode 100644 index 00000000..5470b717 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dlp v2 API +================================= + +.. automodule:: google.cloud.dlp_v2.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst new file mode 100644 index 00000000..d119451a --- /dev/null +++ b/owl-bot-staging/v2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + dlp_v2/services + dlp_v2/types diff --git a/owl-bot-staging/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/v2/google/cloud/dlp/__init__.py new file mode 100644 index 00000000..3c1a800c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/__init__.py @@ -0,0 +1,395 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.dlp import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient + +from google.cloud.dlp_v2.types.dlp import Action +from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails +from google.cloud.dlp_v2.types.dlp import BoundingBox +from google.cloud.dlp_v2.types.dlp import BucketingConfig +from google.cloud.dlp_v2.types.dlp import ByteContentItem +from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig +from google.cloud.dlp_v2.types.dlp import CharsToIgnore +from google.cloud.dlp_v2.types.dlp import Color +from google.cloud.dlp_v2.types.dlp import Container +from google.cloud.dlp_v2.types.dlp import ContentItem +from google.cloud.dlp_v2.types.dlp import ContentLocation +from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig +from google.cloud.dlp_v2.types.dlp import CryptoHashConfig +from google.cloud.dlp_v2.types.dlp import CryptoKey +from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig +from google.cloud.dlp_v2.types.dlp import DataProfileAction +from google.cloud.dlp_v2.types.dlp import DataProfileConfigSnapshot +from google.cloud.dlp_v2.types.dlp import DataProfileJobConfig +from google.cloud.dlp_v2.types.dlp import DataProfileLocation +from google.cloud.dlp_v2.types.dlp import DataProfilePubSubCondition +from 
google.cloud.dlp_v2.types.dlp import DataProfilePubSubMessage +from google.cloud.dlp_v2.types.dlp import DataRiskLevel +from google.cloud.dlp_v2.types.dlp import DateShiftConfig +from google.cloud.dlp_v2.types.dlp import DateTime +from google.cloud.dlp_v2.types.dlp import DeidentifyConfig +from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate +from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest +from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import DlpJob +from google.cloud.dlp_v2.types.dlp import DocumentLocation +from google.cloud.dlp_v2.types.dlp import Error +from google.cloud.dlp_v2.types.dlp import ExcludeByHotword +from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes +from google.cloud.dlp_v2.types.dlp import ExclusionRule +from google.cloud.dlp_v2.types.dlp import FieldTransformation +from google.cloud.dlp_v2.types.dlp import Finding +from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest +from google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig +from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest +from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import HybridContentItem +from google.cloud.dlp_v2.types.dlp import HybridFindingDetails +from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest +from google.cloud.dlp_v2.types.dlp import 
HybridInspectJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import HybridInspectResponse +from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics +from google.cloud.dlp_v2.types.dlp import ImageLocation +from google.cloud.dlp_v2.types.dlp import ImageTransformations +from google.cloud.dlp_v2.types.dlp import InfoTypeCategory +from google.cloud.dlp_v2.types.dlp import InfoTypeDescription +from google.cloud.dlp_v2.types.dlp import InfoTypeStats +from google.cloud.dlp_v2.types.dlp import InfoTypeSummary +from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations +from google.cloud.dlp_v2.types.dlp import InspectConfig +from google.cloud.dlp_v2.types.dlp import InspectContentRequest +from google.cloud.dlp_v2.types.dlp import InspectContentResponse +from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails +from google.cloud.dlp_v2.types.dlp import InspectionRule +from google.cloud.dlp_v2.types.dlp import InspectionRuleSet +from google.cloud.dlp_v2.types.dlp import InspectJobConfig +from google.cloud.dlp_v2.types.dlp import InspectResult +from google.cloud.dlp_v2.types.dlp import InspectTemplate +from google.cloud.dlp_v2.types.dlp import JobTrigger +from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse +from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest +from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse +from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse +from google.cloud.dlp_v2.types.dlp import 
ListJobTriggersRequest +from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import Location +from google.cloud.dlp_v2.types.dlp import Manual +from google.cloud.dlp_v2.types.dlp import MetadataLocation +from google.cloud.dlp_v2.types.dlp import OtherInfoTypeSummary +from google.cloud.dlp_v2.types.dlp import OutputStorageConfig +from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation +from google.cloud.dlp_v2.types.dlp import PrivacyMetric +from google.cloud.dlp_v2.types.dlp import ProfileStatus +from google.cloud.dlp_v2.types.dlp import QuasiId +from google.cloud.dlp_v2.types.dlp import QuoteInfo +from google.cloud.dlp_v2.types.dlp import Range +from google.cloud.dlp_v2.types.dlp import RecordCondition +from google.cloud.dlp_v2.types.dlp import RecordLocation +from google.cloud.dlp_v2.types.dlp import RecordSuppression +from google.cloud.dlp_v2.types.dlp import RecordTransformation +from google.cloud.dlp_v2.types.dlp import RecordTransformations +from google.cloud.dlp_v2.types.dlp import RedactConfig +from google.cloud.dlp_v2.types.dlp import RedactImageRequest +from google.cloud.dlp_v2.types.dlp import RedactImageResponse +from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import ReplaceDictionaryConfig +from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig +from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig +from google.cloud.dlp_v2.types.dlp import Schedule +from google.cloud.dlp_v2.types.dlp import StatisticalTable +from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel +from google.cloud.dlp_v2.types.dlp import StoredInfoType +from 
google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion +from google.cloud.dlp_v2.types.dlp import Table +from google.cloud.dlp_v2.types.dlp import TableDataProfile +from google.cloud.dlp_v2.types.dlp import TableLocation +from google.cloud.dlp_v2.types.dlp import TimePartConfig +from google.cloud.dlp_v2.types.dlp import TransformationConfig +from google.cloud.dlp_v2.types.dlp import TransformationDescription +from google.cloud.dlp_v2.types.dlp import TransformationDetails +from google.cloud.dlp_v2.types.dlp import TransformationDetailsStorageConfig +from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling +from google.cloud.dlp_v2.types.dlp import TransformationLocation +from google.cloud.dlp_v2.types.dlp import TransformationOverview +from google.cloud.dlp_v2.types.dlp import TransformationResultStatus +from google.cloud.dlp_v2.types.dlp import TransformationSummary +from google.cloud.dlp_v2.types.dlp import TransientCryptoKey +from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import Value +from google.cloud.dlp_v2.types.dlp import ValueFrequency +from google.cloud.dlp_v2.types.dlp import VersionDescription +from google.cloud.dlp_v2.types.dlp import ContentOption +from google.cloud.dlp_v2.types.dlp import DlpJobType +from google.cloud.dlp_v2.types.dlp import EncryptionStatus +from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy +from google.cloud.dlp_v2.types.dlp import MatchingType +from google.cloud.dlp_v2.types.dlp import MetadataType +from google.cloud.dlp_v2.types.dlp import 
RelationalOperator +from google.cloud.dlp_v2.types.dlp import ResourceVisibility +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState +from google.cloud.dlp_v2.types.dlp import TransformationContainerType +from google.cloud.dlp_v2.types.dlp import TransformationResultStatusType +from google.cloud.dlp_v2.types.dlp import TransformationType +from google.cloud.dlp_v2.types.storage import BigQueryField +from google.cloud.dlp_v2.types.storage import BigQueryKey +from google.cloud.dlp_v2.types.storage import BigQueryOptions +from google.cloud.dlp_v2.types.storage import BigQueryTable +from google.cloud.dlp_v2.types.storage import CloudStorageFileSet +from google.cloud.dlp_v2.types.storage import CloudStorageOptions +from google.cloud.dlp_v2.types.storage import CloudStoragePath +from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet +from google.cloud.dlp_v2.types.storage import CustomInfoType +from google.cloud.dlp_v2.types.storage import DatastoreKey +from google.cloud.dlp_v2.types.storage import DatastoreOptions +from google.cloud.dlp_v2.types.storage import EntityId +from google.cloud.dlp_v2.types.storage import FieldId +from google.cloud.dlp_v2.types.storage import HybridOptions +from google.cloud.dlp_v2.types.storage import InfoType +from google.cloud.dlp_v2.types.storage import Key +from google.cloud.dlp_v2.types.storage import KindExpression +from google.cloud.dlp_v2.types.storage import PartitionId +from google.cloud.dlp_v2.types.storage import RecordKey +from google.cloud.dlp_v2.types.storage import SensitivityScore +from google.cloud.dlp_v2.types.storage import StorageConfig +from google.cloud.dlp_v2.types.storage import StoredType +from google.cloud.dlp_v2.types.storage import TableOptions +from google.cloud.dlp_v2.types.storage import FileType +from google.cloud.dlp_v2.types.storage import Likelihood + +__all__ = ('DlpServiceClient', + 'DlpServiceAsyncClient', + 'Action', + 'ActivateJobTriggerRequest', + 
'AnalyzeDataSourceRiskDetails', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'Color', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateDeidentifyTemplateRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DataProfileAction', + 'DataProfileConfigSnapshot', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + 'DataRiskLevel', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyTemplate', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDlpJobRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeByHotword', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetDeidentifyTemplateRequest', + 'GetDlpJobRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetStoredInfoTypeRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'ImageTransformations', + 'InfoTypeCategory', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeSummary', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListDeidentifyTemplatesRequest', + 
'ListDeidentifyTemplatesResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OtherInfoTypeSummary', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'ProfileStatus', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformation', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'ReplaceDictionaryConfig', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableDataProfile', + 'TableLocation', + 'TimePartConfig', + 'TransformationConfig', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationDetailsStorageConfig', + 'TransformationErrorHandling', + 'TransformationLocation', + 'TransformationOverview', + 'TransformationResultStatus', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateDeidentifyTemplateRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 'VersionDescription', + 'ContentOption', + 'DlpJobType', + 'EncryptionStatus', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'RelationalOperator', + 'ResourceVisibility', + 'StoredInfoTypeState', + 'TransformationContainerType', + 'TransformationResultStatusType', + 'TransformationType', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 
'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'SensitivityScore', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp/py.typed b/owl-bot-staging/v2/google/cloud/dlp/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. 
diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py new file mode 100644 index 00000000..8397a3ad --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dlp_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.dlp_service import DlpServiceClient +from .services.dlp_service import DlpServiceAsyncClient + +from .types.dlp import Action +from .types.dlp import ActivateJobTriggerRequest +from .types.dlp import AnalyzeDataSourceRiskDetails +from .types.dlp import BoundingBox +from .types.dlp import BucketingConfig +from .types.dlp import ByteContentItem +from .types.dlp import CancelDlpJobRequest +from .types.dlp import CharacterMaskConfig +from .types.dlp import CharsToIgnore +from .types.dlp import Color +from .types.dlp import Container +from .types.dlp import ContentItem +from .types.dlp import ContentLocation +from .types.dlp import CreateDeidentifyTemplateRequest +from .types.dlp import CreateDlpJobRequest +from .types.dlp import CreateInspectTemplateRequest +from .types.dlp import CreateJobTriggerRequest +from .types.dlp import CreateStoredInfoTypeRequest +from .types.dlp import CryptoDeterministicConfig +from .types.dlp import CryptoHashConfig +from .types.dlp import CryptoKey +from .types.dlp 
import CryptoReplaceFfxFpeConfig +from .types.dlp import DataProfileAction +from .types.dlp import DataProfileConfigSnapshot +from .types.dlp import DataProfileJobConfig +from .types.dlp import DataProfileLocation +from .types.dlp import DataProfilePubSubCondition +from .types.dlp import DataProfilePubSubMessage +from .types.dlp import DataRiskLevel +from .types.dlp import DateShiftConfig +from .types.dlp import DateTime +from .types.dlp import DeidentifyConfig +from .types.dlp import DeidentifyContentRequest +from .types.dlp import DeidentifyContentResponse +from .types.dlp import DeidentifyTemplate +from .types.dlp import DeleteDeidentifyTemplateRequest +from .types.dlp import DeleteDlpJobRequest +from .types.dlp import DeleteInspectTemplateRequest +from .types.dlp import DeleteJobTriggerRequest +from .types.dlp import DeleteStoredInfoTypeRequest +from .types.dlp import DlpJob +from .types.dlp import DocumentLocation +from .types.dlp import Error +from .types.dlp import ExcludeByHotword +from .types.dlp import ExcludeInfoTypes +from .types.dlp import ExclusionRule +from .types.dlp import FieldTransformation +from .types.dlp import Finding +from .types.dlp import FinishDlpJobRequest +from .types.dlp import FixedSizeBucketingConfig +from .types.dlp import GetDeidentifyTemplateRequest +from .types.dlp import GetDlpJobRequest +from .types.dlp import GetInspectTemplateRequest +from .types.dlp import GetJobTriggerRequest +from .types.dlp import GetStoredInfoTypeRequest +from .types.dlp import HybridContentItem +from .types.dlp import HybridFindingDetails +from .types.dlp import HybridInspectDlpJobRequest +from .types.dlp import HybridInspectJobTriggerRequest +from .types.dlp import HybridInspectResponse +from .types.dlp import HybridInspectStatistics +from .types.dlp import ImageLocation +from .types.dlp import ImageTransformations +from .types.dlp import InfoTypeCategory +from .types.dlp import InfoTypeDescription +from .types.dlp import InfoTypeStats +from .types.dlp 
import InfoTypeSummary +from .types.dlp import InfoTypeTransformations +from .types.dlp import InspectConfig +from .types.dlp import InspectContentRequest +from .types.dlp import InspectContentResponse +from .types.dlp import InspectDataSourceDetails +from .types.dlp import InspectionRule +from .types.dlp import InspectionRuleSet +from .types.dlp import InspectJobConfig +from .types.dlp import InspectResult +from .types.dlp import InspectTemplate +from .types.dlp import JobTrigger +from .types.dlp import KmsWrappedCryptoKey +from .types.dlp import LargeCustomDictionaryConfig +from .types.dlp import LargeCustomDictionaryStats +from .types.dlp import ListDeidentifyTemplatesRequest +from .types.dlp import ListDeidentifyTemplatesResponse +from .types.dlp import ListDlpJobsRequest +from .types.dlp import ListDlpJobsResponse +from .types.dlp import ListInfoTypesRequest +from .types.dlp import ListInfoTypesResponse +from .types.dlp import ListInspectTemplatesRequest +from .types.dlp import ListInspectTemplatesResponse +from .types.dlp import ListJobTriggersRequest +from .types.dlp import ListJobTriggersResponse +from .types.dlp import ListStoredInfoTypesRequest +from .types.dlp import ListStoredInfoTypesResponse +from .types.dlp import Location +from .types.dlp import Manual +from .types.dlp import MetadataLocation +from .types.dlp import OtherInfoTypeSummary +from .types.dlp import OutputStorageConfig +from .types.dlp import PrimitiveTransformation +from .types.dlp import PrivacyMetric +from .types.dlp import ProfileStatus +from .types.dlp import QuasiId +from .types.dlp import QuoteInfo +from .types.dlp import Range +from .types.dlp import RecordCondition +from .types.dlp import RecordLocation +from .types.dlp import RecordSuppression +from .types.dlp import RecordTransformation +from .types.dlp import RecordTransformations +from .types.dlp import RedactConfig +from .types.dlp import RedactImageRequest +from .types.dlp import RedactImageResponse +from .types.dlp import 
ReidentifyContentRequest +from .types.dlp import ReidentifyContentResponse +from .types.dlp import ReplaceDictionaryConfig +from .types.dlp import ReplaceValueConfig +from .types.dlp import ReplaceWithInfoTypeConfig +from .types.dlp import RiskAnalysisJobConfig +from .types.dlp import Schedule +from .types.dlp import StatisticalTable +from .types.dlp import StorageMetadataLabel +from .types.dlp import StoredInfoType +from .types.dlp import StoredInfoTypeConfig +from .types.dlp import StoredInfoTypeStats +from .types.dlp import StoredInfoTypeVersion +from .types.dlp import Table +from .types.dlp import TableDataProfile +from .types.dlp import TableLocation +from .types.dlp import TimePartConfig +from .types.dlp import TransformationConfig +from .types.dlp import TransformationDescription +from .types.dlp import TransformationDetails +from .types.dlp import TransformationDetailsStorageConfig +from .types.dlp import TransformationErrorHandling +from .types.dlp import TransformationLocation +from .types.dlp import TransformationOverview +from .types.dlp import TransformationResultStatus +from .types.dlp import TransformationSummary +from .types.dlp import TransientCryptoKey +from .types.dlp import UnwrappedCryptoKey +from .types.dlp import UpdateDeidentifyTemplateRequest +from .types.dlp import UpdateInspectTemplateRequest +from .types.dlp import UpdateJobTriggerRequest +from .types.dlp import UpdateStoredInfoTypeRequest +from .types.dlp import Value +from .types.dlp import ValueFrequency +from .types.dlp import VersionDescription +from .types.dlp import ContentOption +from .types.dlp import DlpJobType +from .types.dlp import EncryptionStatus +from .types.dlp import InfoTypeSupportedBy +from .types.dlp import MatchingType +from .types.dlp import MetadataType +from .types.dlp import RelationalOperator +from .types.dlp import ResourceVisibility +from .types.dlp import StoredInfoTypeState +from .types.dlp import TransformationContainerType +from .types.dlp import 
TransformationResultStatusType +from .types.dlp import TransformationType +from .types.storage import BigQueryField +from .types.storage import BigQueryKey +from .types.storage import BigQueryOptions +from .types.storage import BigQueryTable +from .types.storage import CloudStorageFileSet +from .types.storage import CloudStorageOptions +from .types.storage import CloudStoragePath +from .types.storage import CloudStorageRegexFileSet +from .types.storage import CustomInfoType +from .types.storage import DatastoreKey +from .types.storage import DatastoreOptions +from .types.storage import EntityId +from .types.storage import FieldId +from .types.storage import HybridOptions +from .types.storage import InfoType +from .types.storage import Key +from .types.storage import KindExpression +from .types.storage import PartitionId +from .types.storage import RecordKey +from .types.storage import SensitivityScore +from .types.storage import StorageConfig +from .types.storage import StoredType +from .types.storage import TableOptions +from .types.storage import FileType +from .types.storage import Likelihood + +__all__ = ( + 'DlpServiceAsyncClient', +'Action', +'ActivateJobTriggerRequest', +'AnalyzeDataSourceRiskDetails', +'BigQueryField', +'BigQueryKey', +'BigQueryOptions', +'BigQueryTable', +'BoundingBox', +'BucketingConfig', +'ByteContentItem', +'CancelDlpJobRequest', +'CharacterMaskConfig', +'CharsToIgnore', +'CloudStorageFileSet', +'CloudStorageOptions', +'CloudStoragePath', +'CloudStorageRegexFileSet', +'Color', +'Container', +'ContentItem', +'ContentLocation', +'ContentOption', +'CreateDeidentifyTemplateRequest', +'CreateDlpJobRequest', +'CreateInspectTemplateRequest', +'CreateJobTriggerRequest', +'CreateStoredInfoTypeRequest', +'CryptoDeterministicConfig', +'CryptoHashConfig', +'CryptoKey', +'CryptoReplaceFfxFpeConfig', +'CustomInfoType', +'DataProfileAction', +'DataProfileConfigSnapshot', +'DataProfileJobConfig', +'DataProfileLocation', +'DataProfilePubSubCondition', 
+'DataProfilePubSubMessage', +'DataRiskLevel', +'DatastoreKey', +'DatastoreOptions', +'DateShiftConfig', +'DateTime', +'DeidentifyConfig', +'DeidentifyContentRequest', +'DeidentifyContentResponse', +'DeidentifyTemplate', +'DeleteDeidentifyTemplateRequest', +'DeleteDlpJobRequest', +'DeleteInspectTemplateRequest', +'DeleteJobTriggerRequest', +'DeleteStoredInfoTypeRequest', +'DlpJob', +'DlpJobType', +'DlpServiceClient', +'DocumentLocation', +'EncryptionStatus', +'EntityId', +'Error', +'ExcludeByHotword', +'ExcludeInfoTypes', +'ExclusionRule', +'FieldId', +'FieldTransformation', +'FileType', +'Finding', +'FinishDlpJobRequest', +'FixedSizeBucketingConfig', +'GetDeidentifyTemplateRequest', +'GetDlpJobRequest', +'GetInspectTemplateRequest', +'GetJobTriggerRequest', +'GetStoredInfoTypeRequest', +'HybridContentItem', +'HybridFindingDetails', +'HybridInspectDlpJobRequest', +'HybridInspectJobTriggerRequest', +'HybridInspectResponse', +'HybridInspectStatistics', +'HybridOptions', +'ImageLocation', +'ImageTransformations', +'InfoType', +'InfoTypeCategory', +'InfoTypeDescription', +'InfoTypeStats', +'InfoTypeSummary', +'InfoTypeSupportedBy', +'InfoTypeTransformations', +'InspectConfig', +'InspectContentRequest', +'InspectContentResponse', +'InspectDataSourceDetails', +'InspectJobConfig', +'InspectResult', +'InspectTemplate', +'InspectionRule', +'InspectionRuleSet', +'JobTrigger', +'Key', +'KindExpression', +'KmsWrappedCryptoKey', +'LargeCustomDictionaryConfig', +'LargeCustomDictionaryStats', +'Likelihood', +'ListDeidentifyTemplatesRequest', +'ListDeidentifyTemplatesResponse', +'ListDlpJobsRequest', +'ListDlpJobsResponse', +'ListInfoTypesRequest', +'ListInfoTypesResponse', +'ListInspectTemplatesRequest', +'ListInspectTemplatesResponse', +'ListJobTriggersRequest', +'ListJobTriggersResponse', +'ListStoredInfoTypesRequest', +'ListStoredInfoTypesResponse', +'Location', +'Manual', +'MatchingType', +'MetadataLocation', +'MetadataType', +'OtherInfoTypeSummary', +'OutputStorageConfig', 
+'PartitionId', +'PrimitiveTransformation', +'PrivacyMetric', +'ProfileStatus', +'QuasiId', +'QuoteInfo', +'Range', +'RecordCondition', +'RecordKey', +'RecordLocation', +'RecordSuppression', +'RecordTransformation', +'RecordTransformations', +'RedactConfig', +'RedactImageRequest', +'RedactImageResponse', +'ReidentifyContentRequest', +'ReidentifyContentResponse', +'RelationalOperator', +'ReplaceDictionaryConfig', +'ReplaceValueConfig', +'ReplaceWithInfoTypeConfig', +'ResourceVisibility', +'RiskAnalysisJobConfig', +'Schedule', +'SensitivityScore', +'StatisticalTable', +'StorageConfig', +'StorageMetadataLabel', +'StoredInfoType', +'StoredInfoTypeConfig', +'StoredInfoTypeState', +'StoredInfoTypeStats', +'StoredInfoTypeVersion', +'StoredType', +'Table', +'TableDataProfile', +'TableLocation', +'TableOptions', +'TimePartConfig', +'TransformationConfig', +'TransformationContainerType', +'TransformationDescription', +'TransformationDetails', +'TransformationDetailsStorageConfig', +'TransformationErrorHandling', +'TransformationLocation', +'TransformationOverview', +'TransformationResultStatus', +'TransformationResultStatusType', +'TransformationSummary', +'TransformationType', +'TransientCryptoKey', +'UnwrappedCryptoKey', +'UpdateDeidentifyTemplateRequest', +'UpdateInspectTemplateRequest', +'UpdateJobTriggerRequest', +'UpdateStoredInfoTypeRequest', +'Value', +'ValueFrequency', +'VersionDescription', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json new file mode 100644 index 00000000..634002d4 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json @@ -0,0 +1,538 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dlp_v2", + "protoPackage": "google.privacy.dlp.v2", + "schema": "1.0", + "services": { + "DlpService": { + "clients": { + "grpc": { + 
"libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + 
"list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DlpServiceAsyncClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + 
"get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "rest": { + "libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + 
"methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ 
b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py new file mode 100644 index 00000000..e8e1c384 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py new file mode 100644 index 00000000..aa9c062a --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DlpServiceClient +from .async_client import DlpServiceAsyncClient + +__all__ = ( + 'DlpServiceClient', + 'DlpServiceAsyncClient', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py new file mode 100644 index 00000000..041479c1 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -0,0 +1,4143 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dlp_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .client import DlpServiceClient + + +class DlpServiceAsyncClient: + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. 
+ """ + + _client: DlpServiceClient + + DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT + + deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) + parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) + dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) + parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) + dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) + parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) + finding_path = staticmethod(DlpServiceClient.finding_path) + parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) + inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) + parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) + job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) + parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) + stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) + parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) + common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DlpServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DlpServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DlpServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DlpServiceClient.common_project_path) + parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) + common_location_path = 
staticmethod(DlpServiceClient.common_location_path) + parse_common_location_path = staticmethod(DlpServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DlpServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(DlpServiceClient).get_transport_class, type(DlpServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DlpServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. 
+ client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DlpServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def inspect_content(self, + request: Optional[Union[dlp.InspectContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. 
+ For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = await client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.inspect_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def redact_image(self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = await client.redact_image(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]]): + The request object. Request to search for potentially + sensitive info in an image and redact it by covering it + with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.redact_image, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def deidentify_content(self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = await client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]]): + The request object. Request to de-identify a + ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.deidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reidentify_content(self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]]): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_info_types(self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = await client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]]): + The request object. Request for the list of infoTypes. + parent (:class:`str`): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_inspect_template(self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]]): + The request object. Request message for + CreateInspectTemplate. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_inspect_template(self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]]): + The request object. Request message for + UpdateInspectTemplate. + name (:class:`str`): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_inspect_template(self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]]): + The request object. Request message for + GetInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_inspect_templates(self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesAsyncPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]]): + The request object. Request message for + ListInspectTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListInspectTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_inspect_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListInspectTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_inspect_template(self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_inspect_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]]): + The request object. Request message for + DeleteInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_deidentify_template(self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]]): + The request object. Request message for + CreateDeidentifyTemplate. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_deidentify_template(self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deidentify_template(self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. 
+ See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]]): + The request object. Request message for + GetDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_deidentify_templates(self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesAsyncPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]]): + The request object. Request message for + ListDeidentifyTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListDeidentifyTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deidentify_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListDeidentifyTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_deidentify_template(self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_deidentify_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]]): + The request object. Request message for + DeleteDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_job_trigger(self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = await client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]]): + The request object. Request message for + CreateJobTrigger. + parent (:class:`str`): + Required. Parent resource name. 
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`):
+                Required. The JobTrigger to create.
+                This corresponds to the ``job_trigger`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.JobTrigger:
+                Contains a configuration to make dlp
+                api calls on a repeating basis. See
+                https://cloud.google.com/dlp/docs/concepts-job-triggers
+                to learn more.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, job_trigger])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = dlp.CreateJobTriggerRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_job_trigger(self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]]): + The request object. Request message for + UpdateJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def hybrid_inspect_job_trigger(self, + request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.HybridInspectJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_trigger(self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]]): + The request object. Request message for GetJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_job_triggers(self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersAsyncPager: + r"""Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]]): + The request object. Request message for ListJobTriggers. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager: + Response message for ListJobTriggers. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListJobTriggersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_triggers, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTriggersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job_trigger(self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. 
+ See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_trigger(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]]): + The request object. Request message for + DeleteJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def activate_job_trigger(self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.activate_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]]): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.activate_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_dlp_job(self, + request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_job: Optional[dlp.InspectJobConfig] = None, + risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]]): + The request object. Request message for + CreateDlpJobRequest. Used to initiate long running jobs + such as calculating risk metrics or inspecting Google + Cloud Storage. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_dlp_jobs(self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsAsyncPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]]): + The request object. The request message for listing DLP + jobs. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListDlpJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_dlp_jobs, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDlpJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_dlp_job(self, + request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]]): + The request object. The request message for + [DlpJobs.GetDlpJob][]. + name (:class:`str`): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dlp_job(self, + request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. 
+ See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]]): + The request object. The request message for deleting a + DLP job. + name (:class:`str`): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_dlp_job(self, + request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]]): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_stored_info_type(self, + request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]]): + The request object. Request message for + CreateStoredInfoType. + parent (:class:`str`): + Required. Parent resource name. 
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+                -  Organizations scope, location specified:
+                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                -  Organizations scope, no location specified (defaults
+                   to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`):
+                Required. Configuration of the
+                storedInfoType to create.
+
+                This corresponds to the ``config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.StoredInfoType:
+                StoredInfoType resource message that
+                contains information about the current
+                version and any pending updates.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_stored_info_type, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_stored_info_type(self, + request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.update_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]]): + The request object. Request message for + UpdateStoredInfoType. + name (:class:`str`): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_stored_info_type, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_stored_info_type(self, + request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. 
+ See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]]): + The request object. Request message for + GetStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_stored_info_types(self, + request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListStoredInfoTypesAsyncPager: + r"""Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]]): + The request object. Request message for + ListStoredInfoTypes. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListStoredInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_stored_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListStoredInfoTypesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_stored_info_type(self, + request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_stored_info_type(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]]): + The request object. Request message for + DeleteStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def hybrid_inspect_dlp_job(self, + request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.HybridInspectDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def finish_dlp_job(self, + request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.finish_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]]): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.finish_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DlpServiceAsyncClient", +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py new file mode 100644 index 00000000..9d90a824 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py @@ -0,0 +1,4269 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.dlp_v2 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DlpServiceGrpcTransport +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .transports.rest import DlpServiceRestTransport + + +class DlpServiceClientMeta(type): + """Metaclass for the DlpService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] + _transport_registry["grpc"] = DlpServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DlpServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DlpServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DlpServiceClient(metaclass=DlpServiceClientMeta): + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dlp.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def deidentify_template_path(organization: str,deidentify_template: str,) -> str: + """Returns a fully-qualified deidentify_template string.""" + return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) + + @staticmethod + def parse_deidentify_template_path(path: str) -> Dict[str,str]: + """Parses a deidentify_template path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def dlp_content_path(project: str,) -> str: + """Returns a fully-qualified dlp_content string.""" + return "projects/{project}/dlpContent".format(project=project, ) + + @staticmethod + def parse_dlp_content_path(path: str) -> Dict[str,str]: + """Parses a dlp_content path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpContent$", path) + return m.groupdict() if m else {} + + @staticmethod + def dlp_job_path(project: str,dlp_job: str,) -> str: + """Returns a fully-qualified dlp_job string.""" + return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) + + @staticmethod + def parse_dlp_job_path(path: str) -> Dict[str,str]: + """Parses a dlp_job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def finding_path(project: str,location: str,finding: str,) -> str: + """Returns a fully-qualified finding string.""" + return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) + + @staticmethod + def parse_finding_path(path: str) -> Dict[str,str]: + """Parses a finding path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", path) + return m.groupdict() if m else {} + + 
@staticmethod + def inspect_template_path(organization: str,inspect_template: str,) -> str: + """Returns a fully-qualified inspect_template string.""" + return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) + + @staticmethod + def parse_inspect_template_path(path: str) -> Dict[str,str]: + """Parses a inspect_template path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def job_trigger_path(project: str,job_trigger: str,) -> str: + """Returns a fully-qualified job_trigger string.""" + return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) + + @staticmethod + def parse_job_trigger_path(path: str) -> Dict[str,str]: + """Parses a job_trigger path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def stored_info_type_path(organization: str,stored_info_type: str,) -> str: + """Returns a fully-qualified stored_info_type string.""" + return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) + + @staticmethod + def parse_stored_info_type_path(path: str) -> Dict[str,str]: + """Parses a stored_info_type path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/storedInfoTypes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = 
re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DlpServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DlpServiceTransport): + # transport is a DlpServiceTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def inspect_content(self, + request: Optional[Union[dlp.InspectContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.InspectContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.InspectContentRequest): + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.inspect_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def redact_image(self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = client.redact_image(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]): + The request object. Request to search for potentially + sensitive info in an image and redact it by covering it + with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.RedactImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.RedactImageRequest): + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.redact_image] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def deidentify_content(self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]): + The request object. Request to de-identify a + ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeidentifyContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeidentifyContentRequest): + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.deidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def reidentify_content(self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ReidentifyContentRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ReidentifyContentRequest): + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_info_types(self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]): + The request object. Request for the list of infoTypes. + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.ListInfoTypesRequest): + request = dlp.ListInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_info_types] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_inspect_template(self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]): + The request object. Request message for + CreateInspectTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateInspectTemplateRequest): + request = dlp.CreateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_inspect_template(self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]): + The request object. Request message for + UpdateInspectTemplate. + name (str): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateInspectTemplateRequest): + request = dlp.UpdateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_inspect_template(self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]): + The request object. Request message for + GetInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetInspectTemplateRequest): + request = dlp.GetInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_inspect_templates(self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]): + The request object. Request message for + ListInspectTemplates. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInspectTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListInspectTemplatesRequest): + request = dlp.ListInspectTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInspectTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_inspect_template(self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_inspect_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]): + The request object. Request message for + DeleteInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteInspectTemplateRequest): + request = dlp.DeleteInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_deidentify_template(self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]): + The request object. Request message for + CreateDeidentifyTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): + request = dlp.CreateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_deidentify_template(self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (str): + Required. 
Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): + request = dlp.UpdateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deidentify_template(self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]): + The request object. Request message for + GetDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDeidentifyTemplateRequest): + request = dlp.GetDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_deidentify_templates(self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]): + The request object. Request message for + ListDeidentifyTemplates. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDeidentifyTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): + request = dlp.ListDeidentifyTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeidentifyTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_deidentify_template(self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_deidentify_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]): + The request object. Request message for + DeleteDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): + request = dlp.DeleteDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_job_trigger(self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]): + The request object. Request message for + CreateJobTrigger. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateJobTriggerRequest): + request = dlp.CreateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_job_trigger(self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]): + The request object. Request message for + UpdateJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateJobTriggerRequest): + request = dlp.UpdateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def hybrid_inspect_job_trigger(self, + request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectJobTriggerRequest): + request = dlp.HybridInspectJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_job_trigger(self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]): + The request object. Request message for GetJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. 
See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetJobTriggerRequest): + request = dlp.GetJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_job_triggers(self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersPager: + r"""Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]): + The request object. Request message for ListJobTriggers. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: + Response message for ListJobTriggers. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListJobTriggersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListJobTriggersRequest): + request = dlp.ListJobTriggersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobTriggersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_job_trigger(self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + client.delete_job_trigger(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]): + The request object. Request message for + DeleteJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteJobTriggerRequest): + request = dlp.DeleteJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def activate_job_trigger(self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.activate_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ActivateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ActivateJobTriggerRequest): + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_dlp_job(self, + request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_job: Optional[dlp.InspectJobConfig] = None, + risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]): + The request object. Request message for + CreateDlpJobRequest. Used to initiate long running jobs + such as calculating risk metrics or inspecting Google + Cloud Storage. + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDlpJobRequest): + request = dlp.CreateDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_dlp_jobs(self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]): + The request object. The request message for listing DLP + jobs. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: + The response message for listing DLP + jobs. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDlpJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDlpJobsRequest): + request = dlp.ListDlpJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDlpJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_dlp_job(self, + request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]): + The request object. The request message for + [DlpJobs.GetDlpJob][]. + name (str): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDlpJobRequest): + request = dlp.GetDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_dlp_job(self, + request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + client.delete_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]): + The request object. The request message for deleting a + DLP job. + name (str): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.DeleteDlpJobRequest): + request = dlp.DeleteDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_dlp_job(self, + request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CancelDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CancelDlpJobRequest): + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_stored_info_type(self, + request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]): + The request object. Request message for + CreateStoredInfoType. + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Required. Configuration of the + storedInfoType to create. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateStoredInfoTypeRequest): + request = dlp.CreateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_stored_info_type(self, + request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.update_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]): + The request object. Request message for + UpdateStoredInfoType. + name (str): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): + request = dlp.UpdateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_stored_info_type(self, + request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]): + The request object. Request message for + GetStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetStoredInfoTypeRequest): + request = dlp.GetStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_stored_info_types(self, + request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListStoredInfoTypesPager: + r"""Lists stored infoTypes. 
+ See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]): + The request object. Request message for + ListStoredInfoTypes. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListStoredInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListStoredInfoTypesRequest): + request = dlp.ListStoredInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListStoredInfoTypesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_stored_info_type(self, + request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_stored_info_type(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]): + The request object. Request message for + DeleteStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): + request = dlp.DeleteStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def hybrid_inspect_dlp_job(self, + request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectDlpJobRequest): + request = dlp.HybridInspectDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def finish_dlp_job(self, + request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + client.finish_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.FinishDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.FinishDlpJobRequest): + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "DlpServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
class ListInspectTemplatesPager:
    """A pager for iterating through ``list_inspect_templates`` requests.

    Wraps an initial
    :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` and
    exposes an ``__iter__`` method that walks the ``inspect_templates``
    field, transparently issuing further ``ListInspectTemplates`` requests
    whenever a ``next_page_token`` indicates more pages.

    All the usual response attributes are available on the pager via
    delegation; when multiple requests have been made, only the most
    recent response is retained for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., dlp.ListInspectTemplatesResponse],
            request: dlp.ListInspectTemplatesRequest,
            response: dlp.ListInspectTemplatesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest):
                The initial request object.
            response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = dlp.ListInspectTemplatesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[dlp.ListInspectTemplatesResponse]:
        """Lazily fetch and yield each page of results in turn."""
        current = self._response
        yield current
        while current.next_page_token:
            self._request.page_token = current.next_page_token
            current = self._method(self._request, metadata=self._metadata)
            self._response = current
            yield current

    def __iter__(self) -> Iterator[dlp.InspectTemplate]:
        # Flatten the per-page template lists into one element stream.
        return (template
                for page in self.pages
                for template in page.inspect_templates)

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListInspectTemplatesAsyncPager:
    """A pager for iterating through ``list_inspect_templates`` requests.

    Wraps an initial
    :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` and
    exposes an ``__aiter__`` method that walks the ``inspect_templates``
    field, transparently awaiting further ``ListInspectTemplates``
    requests whenever a ``next_page_token`` indicates more pages.

    All the usual response attributes are available on the pager via
    delegation; when multiple requests have been made, only the most
    recent response is retained for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]],
            request: dlp.ListInspectTemplatesRequest,
            response: dlp.ListInspectTemplatesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest):
                The initial request object.
            response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = dlp.ListInspectTemplatesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[dlp.ListInspectTemplatesResponse]:
        """Lazily fetch and yield each page of results in turn."""
        current = self._response
        yield current
        while current.next_page_token:
            self._request.page_token = current.next_page_token
            current = await self._method(self._request, metadata=self._metadata)
            self._response = current
            yield current

    def __aiter__(self) -> AsyncIterator[dlp.InspectTemplate]:
        # Flatten the per-page template lists into one async element stream.
        async def _flatten():
            async for page in self.pages:
                for template in page.inspect_templates:
                    yield template

        return _flatten()

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DeidentifyTemplate]: + for page in self.pages: + yield from page.deidentify_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesAsyncPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.DeidentifyTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.deidentify_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_triggers`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListJobTriggersResponse], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.JobTrigger]: + for page in self.pages: + yield from page.job_triggers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersAsyncPager: + """A pager for iterating through ``list_job_triggers`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_triggers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.JobTrigger]: + async def async_generator(): + async for page in self.pages: + for response in page.job_triggers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListDlpJobsResponse], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. 
+ response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DlpJob]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsAsyncPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.DlpJob]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.ListStoredInfoTypesResponse], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.StoredInfoType]: + for page in self.pages: + yield from page.stored_info_types + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesAsyncPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.StoredInfoType]: + async def async_generator(): + async for page in self.pages: + for response in page.stored_info_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py new file mode 100644 index 00000000..df9b4279 --- /dev/null 
+++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DlpServiceTransport +from .grpc import DlpServiceGrpcTransport +from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .rest import DlpServiceRestTransport +from .rest import DlpServiceRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] +_transport_registry['grpc'] = DlpServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport +_transport_registry['rest'] = DlpServiceRestTransport + +__all__ = ( + 'DlpServiceTransport', + 'DlpServiceGrpcTransport', + 'DlpServiceGrpcAsyncIOTransport', + 'DlpServiceRestTransport', + 'DlpServiceRestInterceptor', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py new file mode 100644 index 00000000..e90545e1 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -0,0 +1,752 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dlp_v2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DlpServiceTransport(abc.ABC): + """Abstract transport class for DlpService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dlp.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.inspect_content: gapic_v1.method.wrap_method( + self.inspect_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.redact_image: gapic_v1.method.wrap_method( + self.redact_image, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.deidentify_content: gapic_v1.method.wrap_method( + self.deidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.reidentify_content: gapic_v1.method.wrap_method( + self.reidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_info_types: gapic_v1.method.wrap_method( + self.list_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_inspect_template: gapic_v1.method.wrap_method( + self.create_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_inspect_template: gapic_v1.method.wrap_method( + 
self.update_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_inspect_template: gapic_v1.method.wrap_method( + self.get_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_inspect_templates: gapic_v1.method.wrap_method( + self.list_inspect_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_inspect_template: gapic_v1.method.wrap_method( + self.delete_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_deidentify_template: gapic_v1.method.wrap_method( + self.create_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_deidentify_template: gapic_v1.method.wrap_method( + self.update_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_deidentify_template: gapic_v1.method.wrap_method( + self.get_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_deidentify_templates: gapic_v1.method.wrap_method( + self.list_deidentify_templates, + default_retry=retries.Retry( 
+initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_deidentify_template: gapic_v1.method.wrap_method( + self.delete_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_job_trigger: gapic_v1.method.wrap_method( + self.create_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.update_job_trigger: gapic_v1.method.wrap_method( + self.update_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( + self.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.get_job_trigger: gapic_v1.method.wrap_method( + self.get_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_job_triggers: gapic_v1.method.wrap_method( + self.list_job_triggers, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_job_trigger: gapic_v1.method.wrap_method( + self.delete_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.activate_job_trigger: gapic_v1.method.wrap_method( + self.activate_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.create_dlp_job: gapic_v1.method.wrap_method( + self.create_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.list_dlp_jobs: gapic_v1.method.wrap_method( + self.list_dlp_jobs, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_dlp_job: gapic_v1.method.wrap_method( + self.get_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_dlp_job: gapic_v1.method.wrap_method( + self.delete_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.cancel_dlp_job: gapic_v1.method.wrap_method( + self.cancel_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.create_stored_info_type: gapic_v1.method.wrap_method( + self.create_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.update_stored_info_type: gapic_v1.method.wrap_method( + self.update_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.get_stored_info_type: gapic_v1.method.wrap_method( + self.get_stored_info_type, + default_retry=retries.Retry( 
+initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_stored_info_types: gapic_v1.method.wrap_method( + self.list_stored_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_stored_info_type: gapic_v1.method.wrap_method( + self.delete_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( + self.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.finish_dlp_job: gapic_v1.method.wrap_method( + self.finish_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Union[ + dlp.InspectContentResponse, + Awaitable[dlp.InspectContentResponse] + ]]: + raise NotImplementedError() + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + Union[ + dlp.RedactImageResponse, + Awaitable[dlp.RedactImageResponse] + ]]: + raise NotImplementedError() + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + Union[ + dlp.DeidentifyContentResponse, + Awaitable[dlp.DeidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Union[ + dlp.ReidentifyContentResponse, + Awaitable[dlp.ReidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Union[ + dlp.ListInfoTypesResponse, + Awaitable[dlp.ListInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Union[ + dlp.ListInspectTemplatesResponse, + Awaitable[dlp.ListInspectTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Union[ + 
empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Union[ + dlp.ListDeidentifyTemplatesResponse, + Awaitable[dlp.ListDeidentifyTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def list_job_triggers(self) -> Callable[ + 
[dlp.ListJobTriggersRequest], + Union[ + dlp.ListJobTriggersResponse, + Awaitable[dlp.ListJobTriggersResponse] + ]]: + raise NotImplementedError() + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + Union[ + dlp.ListDlpJobsResponse, + Awaitable[dlp.ListDlpJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def 
list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Union[ + dlp.ListStoredInfoTypesResponse, + Awaitable[dlp.ListStoredInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DlpServiceTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py new file mode 100644 index 00000000..d95be0ba --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -0,0 +1,1262 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO + + +class DlpServiceGrpcTransport(DlpServiceTransport): + """gRPC backend transport for DlpService. + + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + dlp.InspectContentResponse]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. 
+ For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + Returns: + Callable[[~.InspectContentRequest], + ~.InspectContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'inspect_content' not in self._stubs: + self._stubs['inspect_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/InspectContent', + request_serializer=dlp.InspectContentRequest.serialize, + response_deserializer=dlp.InspectContentResponse.deserialize, + ) + return self._stubs['inspect_content'] + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + dlp.RedactImageResponse]: + r"""Return a callable for the redact image method over gRPC. + + Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.RedactImageRequest], + ~.RedactImageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'redact_image' not in self._stubs: + self._stubs['redact_image'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/RedactImage', + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs['redact_image'] + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + dlp.DeidentifyContentResponse]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.DeidentifyContentRequest], + ~.DeidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + dlp.ReidentifyContentResponse]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. 
See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + ~.ReidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + dlp.ListInfoTypesResponse]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + ~.ListInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + dlp.ListInspectTemplatesResponse]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + ~.ListInspectTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. 
+ + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + dlp.ListDeidentifyTemplatesResponse]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + ~.ListDeidentifyTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. 
+ + Returns: + Callable[[~.CreateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.UpdateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. 
To review the findings monitor the jobs + within the trigger. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_job_trigger' not in self._stubs: + self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_job_trigger'] + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_trigger' not in self._stubs: + self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetJobTrigger', + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['get_job_trigger'] + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + dlp.ListJobTriggersResponse]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. 
+ See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + ~.ListJobTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_triggers' not in self._stubs: + self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListJobTriggers', + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs['list_job_triggers'] + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_trigger' not in self._stubs: + self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_trigger'] + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + dlp.DlpJob]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. 
Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + Returns: + Callable[[~.ActivateJobTriggerRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'activate_job_trigger' not in self._stubs: + self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', + request_serializer=dlp.ActivateJobTriggerRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['activate_job_trigger'] + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + dlp.DlpJob]: + r"""Return a callable for the create dlp job method over gRPC. + + Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.CreateDlpJobRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_dlp_job' not in self._stubs: + self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDlpJob', + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['create_dlp_job'] + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + dlp.ListDlpJobsResponse]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + ~.ListDlpJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_dlp_jobs' not in self._stubs: + self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDlpJobs', + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs['list_dlp_jobs'] + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + dlp.DlpJob]: + r"""Return a callable for the get dlp job method over gRPC. + + Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. 
+ + Returns: + Callable[[~.CancelDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. 
The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + dlp.ListStoredInfoTypesResponse]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + ~.ListStoredInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DlpServiceGrpcTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..03c8bf3c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -0,0 +1,1261 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DlpServiceGrpcTransport + + +class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): + """gRPC AsyncIO backend transport for DlpService. + + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Awaitable[dlp.InspectContentResponse]]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. 
+ This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + Returns: + Callable[[~.InspectContentRequest], + Awaitable[~.InspectContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'inspect_content' not in self._stubs: + self._stubs['inspect_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/InspectContent', + request_serializer=dlp.InspectContentRequest.serialize, + response_deserializer=dlp.InspectContentResponse.deserialize, + ) + return self._stubs['inspect_content'] + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + Awaitable[dlp.RedactImageResponse]]: + r"""Return a callable for the redact image method over gRPC. + + Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.RedactImageRequest], + Awaitable[~.RedactImageResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'redact_image' not in self._stubs: + self._stubs['redact_image'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/RedactImage', + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs['redact_image'] + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + Awaitable[dlp.DeidentifyContentResponse]]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.DeidentifyContentRequest], + Awaitable[~.DeidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Awaitable[dlp.ReidentifyContentResponse]]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + Awaitable[~.ReidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Awaitable[dlp.ListInfoTypesResponse]]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + Awaitable[~.ListInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. 
+ + Returns: + Callable[[~.UpdateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Awaitable[dlp.ListInspectTemplatesResponse]]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. 
+ See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + Awaitable[~.ListInspectTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. 
+ + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Awaitable[dlp.ListDeidentifyTemplatesResponse]]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + Awaitable[~.ListDeidentifyTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.CreateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. 
+ + Returns: + Callable[[~.UpdateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'hybrid_inspect_job_trigger' not in self._stubs: + self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_job_trigger'] + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_trigger' not in self._stubs: + self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetJobTrigger', + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['get_job_trigger'] + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + Awaitable[dlp.ListJobTriggersResponse]]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + Awaitable[~.ListJobTriggersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_job_triggers' not in self._stubs:
+            self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary(
+                '/google.privacy.dlp.v2.DlpService/ListJobTriggers',
+                request_serializer=dlp.ListJobTriggersRequest.serialize,
+                response_deserializer=dlp.ListJobTriggersResponse.deserialize,
+            )
+        return self._stubs['list_job_triggers']
+
+    @property
+    def delete_job_trigger(self) -> Callable[
+            [dlp.DeleteJobTriggerRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete job trigger method over gRPC.
+
+        Deletes a job trigger.
+        See
+        https://cloud.google.com/dlp/docs/creating-job-triggers
+        to learn more.
+
+        Returns:
+            Callable[[~.DeleteJobTriggerRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_job_trigger' not in self._stubs:
+            self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary(
+                '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger',
+                request_serializer=dlp.DeleteJobTriggerRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_job_trigger']
+
+    @property
+    def activate_job_trigger(self) -> Callable[
+            [dlp.ActivateJobTriggerRequest],
+            Awaitable[dlp.DlpJob]]:
+        r"""Return a callable for the activate job trigger method over gRPC.
+
+        Activate a job trigger. Causes the immediate execution
+        of a trigger instead of waiting on the trigger event to
+        occur.
+
+        Returns:
+            Callable[[~.ActivateJobTriggerRequest],
+                    Awaitable[~.DlpJob]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'activate_job_trigger' not in self._stubs: + self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', + request_serializer=dlp.ActivateJobTriggerRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['activate_job_trigger'] + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the create dlp job method over gRPC. + + Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.CreateDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_dlp_job' not in self._stubs: + self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDlpJob', + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['create_dlp_job'] + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + Awaitable[dlp.ListDlpJobsResponse]]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. 
See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + Awaitable[~.ListDlpJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_dlp_jobs' not in self._stubs: + self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDlpJobs', + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs['list_dlp_jobs'] + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the get dlp job method over gRPC. + + Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete dlp job method over gRPC. 
+ + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Awaitable[dlp.ListStoredInfoTypesResponse]]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. 
+ + Returns: + Callable[[~.ListStoredInfoTypesRequest], + Awaitable[~.ListStoredInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. 
+ + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'DlpServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py new file mode 100644 index 00000000..105716f4 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -0,0 +1,4325 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore + +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DlpServiceRestInterceptor: + """Interceptor for DlpService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DlpServiceRestTransport. + + .. 
code-block:: python + class MyCustomDlpServiceInterceptor(DlpServiceRestInterceptor): + def pre_activate_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_activate_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_cancel_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_create_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_deidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_delete_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_finish_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return 
response + + def pre_hybrid_inspect_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_hybrid_inspect_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_inspect_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_inspect_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_deidentify_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deidentify_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_dlp_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_dlp_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_inspect_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_inspect_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_job_triggers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_job_triggers(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_list_stored_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_stored_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_redact_image(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_redact_image(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DlpServiceRestTransport(interceptor=MyCustomDlpServiceInterceptor()) + client = DlpServiceClient(transport=transport) + + + """ + def pre_activate_job_trigger(self, request: dlp.ActivateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for activate_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for activate_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_cancel_dlp_job(self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_create_deidentify_template(self, request: dlp.CreateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for create_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_create_dlp_job(self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for create_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_create_inspect_template(self, request: dlp.CreateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for create_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_create_job_trigger(self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for create_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_create_stored_info_type(self, request: dlp.CreateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for create_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_deidentify_content(self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for deidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_deidentify_content(self, response: dlp.DeidentifyContentResponse) -> dlp.DeidentifyContentResponse: + """Post-rpc interceptor for deidentify_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_delete_deidentify_template(self, request: dlp.DeleteDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_delete_dlp_job(self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_inspect_template(self, request: dlp.DeleteInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_job_trigger(self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_stored_info_type(self, request: dlp.DeleteStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_finish_dlp_job(self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for finish_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_get_deidentify_template(self, request: dlp.GetDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for get_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_dlp_job(self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for get_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_inspect_template(self, request: dlp.GetInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_get_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for get_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_job_trigger(self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for get_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_stored_info_type(self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for get_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_hybrid_inspect_dlp_job(self, request: dlp.HybridInspectDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_hybrid_inspect_dlp_job(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_hybrid_inspect_job_trigger(self, request: dlp.HybridInspectJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_hybrid_inspect_job_trigger(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_inspect_content(self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for inspect_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_inspect_content(self, response: dlp.InspectContentResponse) -> dlp.InspectContentResponse: + """Post-rpc interceptor for inspect_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_deidentify_templates(self, request: dlp.ListDeidentifyTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deidentify_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_deidentify_templates(self, response: dlp.ListDeidentifyTemplatesResponse) -> dlp.ListDeidentifyTemplatesResponse: + """Post-rpc interceptor for list_deidentify_templates + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_dlp_jobs(self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_dlp_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_dlp_jobs(self, response: dlp.ListDlpJobsResponse) -> dlp.ListDlpJobsResponse: + """Post-rpc interceptor for list_dlp_jobs + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_list_info_types(self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_info_types(self, response: dlp.ListInfoTypesResponse) -> dlp.ListInfoTypesResponse: + """Post-rpc interceptor for list_info_types + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_inspect_templates(self, request: dlp.ListInspectTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_inspect_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_inspect_templates(self, response: dlp.ListInspectTemplatesResponse) -> dlp.ListInspectTemplatesResponse: + """Post-rpc interceptor for list_inspect_templates + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_job_triggers(self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_job_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_list_job_triggers(self, response: dlp.ListJobTriggersResponse) -> dlp.ListJobTriggersResponse: + """Post-rpc interceptor for list_job_triggers + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_stored_info_types(self, request: dlp.ListStoredInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_stored_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_stored_info_types(self, response: dlp.ListStoredInfoTypesResponse) -> dlp.ListStoredInfoTypesResponse: + """Post-rpc interceptor for list_stored_info_types + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_redact_image(self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for redact_image + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_redact_image(self, response: dlp.RedactImageResponse) -> dlp.RedactImageResponse: + """Post-rpc interceptor for redact_image + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_reidentify_content(self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_reidentify_content(self, response: dlp.ReidentifyContentResponse) -> dlp.ReidentifyContentResponse: + """Post-rpc interceptor for reidentify_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_deidentify_template(self, request: dlp.UpdateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for update_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_inspect_template(self, request: dlp.UpdateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_update_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for update_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_job_trigger(self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for update_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_stored_info_type(self, request: dlp.UpdateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for update_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DlpServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DlpServiceRestInterceptor + + +class DlpServiceRestTransport(DlpServiceTransport): + """REST backend transport for DlpService. 
+ + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[DlpServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DlpServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _ActivateJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("ActivateJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls,
message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ActivateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DlpJob: + r"""Call the activate job trigger method over HTTP. + + Args: + request (~.dlp.ActivateJobTriggerRequest): + The request object. Request message for + ActivateJobTrigger. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/jobTriggers/*}:activate', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:activate', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_activate_job_trigger(request, metadata) + pb_request = dlp.ActivateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = 
getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_activate_job_trigger(resp) + return resp + + class _CancelDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("CancelDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CancelDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the cancel dlp job method over HTTP. + + Args: + request (~.dlp.CancelDlpJobRequest): + The request object. The request message for canceling a + DLP job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/dlpJobs/*}:cancel', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_cancel_dlp_job(request, metadata) + pb_request = dlp.CancelDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CreateDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("CreateDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the create deidentify + template method over HTTP. + + Args: + request (~.dlp.CreateDeidentifyTemplateRequest): + The request object. Request message for + CreateDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_deidentify_template(request, metadata) + pb_request = dlp.CreateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deidentify_template(resp) + return resp + + class _CreateDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("CreateDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DlpJob: + r"""Call the create dlp job method over HTTP. + + Args: + request (~.dlp.CreateDlpJobRequest): + The request object. Request message for + CreateDlpJobRequest. Used to initiate + long running jobs such as calculating + risk metrics or inspecting Google Cloud + Storage. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/dlpJobs', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_dlp_job(request, metadata) + pb_request = dlp.CreateDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_dlp_job(resp) + return resp + + class _CreateInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("CreateInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectTemplate: + r"""Call the create inspect template method over HTTP. + + Args: + request (~.dlp.CreateInspectTemplateRequest): + The request object. Request message for + CreateInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/inspectTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/inspectTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_inspect_template(request, metadata) + pb_request = dlp.CreateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_inspect_template(resp) + return resp + + class _CreateJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("CreateJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.JobTrigger: + r"""Call the create job trigger method over HTTP. + + Args: + request (~.dlp.CreateJobTriggerRequest): + The request object. Request message for CreateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/jobTriggers', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_job_trigger(request, metadata) + pb_request = dlp.CreateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job_trigger(resp) + return resp + + class _CreateStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("CreateStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.StoredInfoType: + r"""Call the create stored info type method over HTTP. + + Args: + request (~.dlp.CreateStoredInfoTypeRequest): + The request object. Request message for + CreateStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/storedInfoTypes', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_stored_info_type(request, metadata) + pb_request = dlp.CreateStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_stored_info_type(resp) + return resp + + class _DeidentifyContent(DlpServiceRestStub): + def __hash__(self): + return hash("DeidentifyContent") + + def __call__(self, + request: dlp.DeidentifyContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyContentResponse: + r"""Call the deidentify content method over HTTP. + + Args: + request (~.dlp.DeidentifyContentRequest): + The request object. Request to de-identify a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:deidentify', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:deidentify', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_deidentify_content(request, metadata) + pb_request = dlp.DeidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyContentResponse() + pb_resp = dlp.DeidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_deidentify_content(resp) + return resp + + class _DeleteDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete deidentify + template method over HTTP. + + Args: + request (~.dlp.DeleteDeidentifyTemplateRequest): + The request object. Request message for + DeleteDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_deidentify_template(request, metadata) + pb_request = dlp.DeleteDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete dlp job method over HTTP. + + Args: + request (~.dlp.DeleteDlpJobRequest): + The request object. The request message for deleting a + DLP job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/dlpJobs/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_dlp_job(request, metadata) + pb_request = dlp.DeleteDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete inspect template method over HTTP. + + Args: + request (~.dlp.DeleteInspectTemplateRequest): + The request object. 
Request message for + DeleteInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_inspect_template(request, metadata) + pb_request = dlp.DeleteInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete job trigger method over HTTP. + + Args: + request (~.dlp.DeleteJobTriggerRequest): + The request object. Request message for DeleteJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_job_trigger(request, metadata) + pb_request = dlp.DeleteJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete stored info type method over HTTP. + + Args: + request (~.dlp.DeleteStoredInfoTypeRequest): + The request object. Request message for + DeleteStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_stored_info_type(request, metadata) + pb_request = dlp.DeleteStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _FinishDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("FinishDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.FinishDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the finish dlp job method over HTTP. + + Args: + request (~.dlp.FinishDlpJobRequest): + The request object. The request message for finishing a + DLP hybrid job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:finish', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_finish_dlp_job(request, metadata) + pb_request = dlp.FinishDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("GetDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the get deidentify template method over HTTP. + + Args: + request (~.dlp.GetDeidentifyTemplateRequest): + The request object. Request message for + GetDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_get_deidentify_template(request, metadata) + pb_request = dlp.GetDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deidentify_template(resp) + return resp + + class _GetDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("GetDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DlpJob: + r"""Call the get dlp job method over HTTP. + + Args: + request (~.dlp.GetDlpJobRequest): + The request object. The request message for [DlpJobs.GetDlpJob][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/dlpJobs/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', + }, + ] + request, metadata = self._interceptor.pre_get_dlp_job(request, metadata) + pb_request = dlp.GetDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_dlp_job(resp) + return resp + + class _GetInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("GetInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectTemplate: + r"""Call the get inspect template method over HTTP. + + Args: + request (~.dlp.GetInspectTemplateRequest): + The request object. Request message for + GetInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_get_inspect_template(request, metadata) + pb_request = dlp.GetInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_inspect_template(resp) + return resp + + class _GetJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("GetJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.JobTrigger: + r"""Call the get job trigger method over HTTP. + + Args: + request (~.dlp.GetJobTriggerRequest): + The request object. Request message for GetJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_job_trigger(request, metadata) + pb_request = dlp.GetJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_trigger(resp) + return resp + + class _GetStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("GetStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.StoredInfoType: + r"""Call the get stored info type method over HTTP. + + Args: + request (~.dlp.GetStoredInfoTypeRequest): + The request object. Request message for + GetStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + }, + ] + request, metadata = self._interceptor.pre_get_stored_info_type(request, metadata) + pb_request = dlp.GetStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stored_info_type(resp) + return resp + + class _HybridInspectDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("HybridInspectDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.HybridInspectDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect dlp job method over HTTP. + + Args: + request (~.dlp.HybridInspectDlpJobRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job(request, metadata) + pb_request = dlp.HybridInspectDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_hybrid_inspect_dlp_job(resp) + return resp + + class _HybridInspectJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("HybridInspectJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.HybridInspectJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect job + trigger method over HTTP. + + Args: + request (~.dlp.HybridInspectJobTriggerRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger(request, metadata) + pb_request = dlp.HybridInspectJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_hybrid_inspect_job_trigger(resp) + return resp + + class _InspectContent(DlpServiceRestStub): + def __hash__(self): + return hash("InspectContent") + + def __call__(self, + request: dlp.InspectContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectContentResponse: + r"""Call the inspect content method over HTTP. + + Args: + request (~.dlp.InspectContentRequest): + The request object. Request to search for potentially + sensitive info in a ContentItem. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectContentResponse: + Results of inspecting an item. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:inspect', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:inspect', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_inspect_content(request, metadata) + pb_request = dlp.InspectContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectContentResponse() + pb_resp = dlp.InspectContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_inspect_content(resp) + return resp + + class _ListDeidentifyTemplates(DlpServiceRestStub): + def __hash__(self): + return hash("ListDeidentifyTemplates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListDeidentifyTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListDeidentifyTemplatesResponse: + r"""Call the list deidentify templates method over HTTP. + + Args: + request (~.dlp.ListDeidentifyTemplatesRequest): + The request object. Request message for + ListDeidentifyTemplates. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListDeidentifyTemplatesResponse: + Response message for + ListDeidentifyTemplates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', + }, + ] + request, metadata = self._interceptor.pre_list_deidentify_templates(request, metadata) + pb_request = dlp.ListDeidentifyTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDeidentifyTemplatesResponse() + pb_resp = dlp.ListDeidentifyTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deidentify_templates(resp) + return resp + + class _ListDlpJobs(DlpServiceRestStub): + def __hash__(self): + return hash("ListDlpJobs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListDlpJobsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListDlpJobsResponse: + r"""Call the list dlp jobs method over HTTP. + + Args: + request (~.dlp.ListDlpJobsRequest): + The request object. The request message for listing DLP + jobs. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListDlpJobsResponse: + The response message for listing DLP + jobs. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/dlpJobs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/dlpJobs', + }, + ] + request, metadata = self._interceptor.pre_list_dlp_jobs(request, metadata) + pb_request = dlp.ListDlpJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDlpJobsResponse() + pb_resp = dlp.ListDlpJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_dlp_jobs(resp) + return resp + + class _ListInfoTypes(DlpServiceRestStub): + def __hash__(self): + return hash("ListInfoTypes") + + def __call__(self, + request: dlp.ListInfoTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListInfoTypesResponse: + r"""Call the list info types method over HTTP. + + Args: + request (~.dlp.ListInfoTypesRequest): + The request object. Request for the list of infoTypes. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInfoTypesResponse: + Response to the ListInfoTypes + request. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/infoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=locations/*}/infoTypes', + }, + ] + request, metadata = self._interceptor.pre_list_info_types(request, metadata) + pb_request = dlp.ListInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInfoTypesResponse() + pb_resp = dlp.ListInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_info_types(resp) + return resp + + class _ListInspectTemplates(DlpServiceRestStub): + def __hash__(self): + return hash("ListInspectTemplates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListInspectTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListInspectTemplatesResponse: + r"""Call the list inspect templates method over HTTP. + + Args: + request (~.dlp.ListInspectTemplatesRequest): + The request object. Request message for + ListInspectTemplates. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInspectTemplatesResponse: + Response message for + ListInspectTemplates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/inspectTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/inspectTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', + }, + ] + request, metadata = self._interceptor.pre_list_inspect_templates(request, metadata) + pb_request = dlp.ListInspectTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInspectTemplatesResponse() + pb_resp = dlp.ListInspectTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_inspect_templates(resp) + return resp + + class _ListJobTriggers(DlpServiceRestStub): + def __hash__(self): + return hash("ListJobTriggers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListJobTriggersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListJobTriggersResponse: + r"""Call the list job triggers method over HTTP. + + Args: + request (~.dlp.ListJobTriggersRequest): + The request object. Request message for ListJobTriggers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListJobTriggersResponse: + Response message for ListJobTriggers. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/jobTriggers', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', + }, + ] + request, metadata = self._interceptor.pre_list_job_triggers(request, metadata) + pb_request = dlp.ListJobTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListJobTriggersResponse() + pb_resp = dlp.ListJobTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_job_triggers(resp) + return resp + + class _ListStoredInfoTypes(DlpServiceRestStub): + def __hash__(self): + return hash("ListStoredInfoTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListStoredInfoTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListStoredInfoTypesResponse: + r"""Call the list stored info types method over HTTP. + + Args: + request (~.dlp.ListStoredInfoTypesRequest): + The request object. Request message for + ListStoredInfoTypes. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListStoredInfoTypesResponse: + Response message for + ListStoredInfoTypes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/storedInfoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', + }, + ] + request, metadata = self._interceptor.pre_list_stored_info_types(request, metadata) + pb_request = dlp.ListStoredInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListStoredInfoTypesResponse() + pb_resp = dlp.ListStoredInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_stored_info_types(resp) + return resp + + class _RedactImage(DlpServiceRestStub): + def __hash__(self): + return hash("RedactImage") + + def __call__(self, + request: dlp.RedactImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.RedactImageResponse: + r"""Call the redact image method over HTTP. + + Args: + request (~.dlp.RedactImageRequest): + The request object. Request to search for potentially + sensitive info in an image and redact it + by covering it with a colored rectangle. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.RedactImageResponse: + Results of redacting an image. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/image:redact', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/image:redact', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_redact_image(request, metadata) + pb_request = dlp.RedactImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.RedactImageResponse() + pb_resp = dlp.RedactImageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_redact_image(resp) + return resp + + class _ReidentifyContent(DlpServiceRestStub): + def __hash__(self): + return hash("ReidentifyContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ReidentifyContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ReidentifyContentResponse: + r"""Call the reidentify content method over HTTP. + + Args: + request (~.dlp.ReidentifyContentRequest): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ReidentifyContentResponse: + Results of re-identifying an item. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:reidentify', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:reidentify', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_reidentify_content(request, metadata) + pb_request = dlp.ReidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ReidentifyContentResponse() + pb_resp = dlp.ReidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reidentify_content(resp) + return resp + + class _UpdateDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the update deidentify + template method over HTTP. + + Args: + request (~.dlp.UpdateDeidentifyTemplateRequest): + The request object. Request message for + UpdateDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_deidentify_template(request, metadata) + pb_request = dlp.UpdateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deidentify_template(resp) + return resp + + class _UpdateInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectTemplate: + r"""Call the update inspect template method over HTTP. + + Args: + request (~.dlp.UpdateInspectTemplateRequest): + The request object. Request message for + UpdateInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_inspect_template(request, metadata) + pb_request = dlp.UpdateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_inspect_template(resp) + return resp + + class _UpdateJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.JobTrigger: + r"""Call the update job trigger method over HTTP. + + Args: + request (~.dlp.UpdateJobTriggerRequest): + The request object. Request message for UpdateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_job_trigger(request, metadata) + pb_request = dlp.UpdateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job_trigger(resp) + return resp + + class _UpdateStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.StoredInfoType: + r"""Call the update stored info type method over HTTP. + + Args: + request (~.dlp.UpdateStoredInfoTypeRequest): + The request object. Request message for + UpdateStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_stored_info_type(request, metadata) + pb_request = dlp.UpdateStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_stored_info_type(resp) + return resp + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + dlp.DeidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + dlp.InspectContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + dlp.ListDeidentifyTemplatesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + dlp.ListDlpJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + dlp.ListInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + dlp.ListInspectTemplatesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + dlp.ListJobTriggersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + dlp.ListStoredInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + dlp.RedactImageResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + dlp.ReidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'DlpServiceRestTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py new file mode 100644 index 00000000..5bc3d949 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .dlp import ( + Action, + ActivateJobTriggerRequest, + AnalyzeDataSourceRiskDetails, + BoundingBox, + BucketingConfig, + ByteContentItem, + CancelDlpJobRequest, + CharacterMaskConfig, + CharsToIgnore, + Color, + Container, + ContentItem, + ContentLocation, + CreateDeidentifyTemplateRequest, + CreateDlpJobRequest, + CreateInspectTemplateRequest, + CreateJobTriggerRequest, + CreateStoredInfoTypeRequest, + CryptoDeterministicConfig, + CryptoHashConfig, + CryptoKey, + CryptoReplaceFfxFpeConfig, + DataProfileAction, + DataProfileConfigSnapshot, + DataProfileJobConfig, + DataProfileLocation, + DataProfilePubSubCondition, + DataProfilePubSubMessage, + DataRiskLevel, + DateShiftConfig, + DateTime, + DeidentifyConfig, + DeidentifyContentRequest, + DeidentifyContentResponse, + DeidentifyTemplate, + DeleteDeidentifyTemplateRequest, + DeleteDlpJobRequest, + DeleteInspectTemplateRequest, + DeleteJobTriggerRequest, + DeleteStoredInfoTypeRequest, + DlpJob, + DocumentLocation, + Error, + ExcludeByHotword, + ExcludeInfoTypes, + ExclusionRule, + FieldTransformation, + Finding, + FinishDlpJobRequest, + FixedSizeBucketingConfig, + GetDeidentifyTemplateRequest, + GetDlpJobRequest, + GetInspectTemplateRequest, + GetJobTriggerRequest, + GetStoredInfoTypeRequest, + HybridContentItem, + HybridFindingDetails, + HybridInspectDlpJobRequest, + HybridInspectJobTriggerRequest, + HybridInspectResponse, + HybridInspectStatistics, + ImageLocation, + ImageTransformations, + InfoTypeCategory, + InfoTypeDescription, + InfoTypeStats, + InfoTypeSummary, + InfoTypeTransformations, + InspectConfig, + InspectContentRequest, + InspectContentResponse, + InspectDataSourceDetails, + InspectionRule, + InspectionRuleSet, + InspectJobConfig, + InspectResult, + InspectTemplate, + JobTrigger, + KmsWrappedCryptoKey, + LargeCustomDictionaryConfig, + LargeCustomDictionaryStats, + ListDeidentifyTemplatesRequest, + ListDeidentifyTemplatesResponse, + ListDlpJobsRequest, + ListDlpJobsResponse, + 
ListInfoTypesRequest, + ListInfoTypesResponse, + ListInspectTemplatesRequest, + ListInspectTemplatesResponse, + ListJobTriggersRequest, + ListJobTriggersResponse, + ListStoredInfoTypesRequest, + ListStoredInfoTypesResponse, + Location, + Manual, + MetadataLocation, + OtherInfoTypeSummary, + OutputStorageConfig, + PrimitiveTransformation, + PrivacyMetric, + ProfileStatus, + QuasiId, + QuoteInfo, + Range, + RecordCondition, + RecordLocation, + RecordSuppression, + RecordTransformation, + RecordTransformations, + RedactConfig, + RedactImageRequest, + RedactImageResponse, + ReidentifyContentRequest, + ReidentifyContentResponse, + ReplaceDictionaryConfig, + ReplaceValueConfig, + ReplaceWithInfoTypeConfig, + RiskAnalysisJobConfig, + Schedule, + StatisticalTable, + StorageMetadataLabel, + StoredInfoType, + StoredInfoTypeConfig, + StoredInfoTypeStats, + StoredInfoTypeVersion, + Table, + TableDataProfile, + TableLocation, + TimePartConfig, + TransformationConfig, + TransformationDescription, + TransformationDetails, + TransformationDetailsStorageConfig, + TransformationErrorHandling, + TransformationLocation, + TransformationOverview, + TransformationResultStatus, + TransformationSummary, + TransientCryptoKey, + UnwrappedCryptoKey, + UpdateDeidentifyTemplateRequest, + UpdateInspectTemplateRequest, + UpdateJobTriggerRequest, + UpdateStoredInfoTypeRequest, + Value, + ValueFrequency, + VersionDescription, + ContentOption, + DlpJobType, + EncryptionStatus, + InfoTypeSupportedBy, + MatchingType, + MetadataType, + RelationalOperator, + ResourceVisibility, + StoredInfoTypeState, + TransformationContainerType, + TransformationResultStatusType, + TransformationType, +) +from .storage import ( + BigQueryField, + BigQueryKey, + BigQueryOptions, + BigQueryTable, + CloudStorageFileSet, + CloudStorageOptions, + CloudStoragePath, + CloudStorageRegexFileSet, + CustomInfoType, + DatastoreKey, + DatastoreOptions, + EntityId, + FieldId, + HybridOptions, + InfoType, + Key, + KindExpression, + 
PartitionId, + RecordKey, + SensitivityScore, + StorageConfig, + StoredType, + TableOptions, + FileType, + Likelihood, +) + +__all__ = ( + 'Action', + 'ActivateJobTriggerRequest', + 'AnalyzeDataSourceRiskDetails', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'Color', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateDeidentifyTemplateRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DataProfileAction', + 'DataProfileConfigSnapshot', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + 'DataRiskLevel', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyTemplate', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDlpJobRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeByHotword', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetDeidentifyTemplateRequest', + 'GetDlpJobRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetStoredInfoTypeRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'ImageTransformations', + 'InfoTypeCategory', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeSummary', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 
'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OtherInfoTypeSummary', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'ProfileStatus', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformation', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'ReplaceDictionaryConfig', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableDataProfile', + 'TableLocation', + 'TimePartConfig', + 'TransformationConfig', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationDetailsStorageConfig', + 'TransformationErrorHandling', + 'TransformationLocation', + 'TransformationOverview', + 'TransformationResultStatus', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateDeidentifyTemplateRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 'VersionDescription', + 'ContentOption', + 'DlpJobType', + 'EncryptionStatus', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'RelationalOperator', + 'ResourceVisibility', + 'StoredInfoTypeState', 
+ 'TransformationContainerType', + 'TransformationResultStatusType', + 'TransformationType', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'SensitivityScore', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py new file mode 100644 index 00000000..d82444a2 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py @@ -0,0 +1,8846 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dlp_v2.types import storage +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'TransformationResultStatusType', + 'TransformationContainerType', + 'TransformationType', + 'RelationalOperator', + 'MatchingType', + 'ContentOption', + 'MetadataType', + 'InfoTypeSupportedBy', + 'DlpJobType', + 'StoredInfoTypeState', + 'ResourceVisibility', + 'EncryptionStatus', + 'ExcludeInfoTypes', + 'ExcludeByHotword', + 'ExclusionRule', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectConfig', + 'ByteContentItem', + 'ContentItem', + 'Table', + 'InspectResult', + 'Finding', + 'Location', + 'ContentLocation', + 'MetadataLocation', + 'StorageMetadataLabel', + 'DocumentLocation', + 'RecordLocation', + 'TableLocation', + 'Container', + 'Range', + 'ImageLocation', + 'BoundingBox', + 'RedactImageRequest', + 'Color', + 'RedactImageResponse', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'InspectContentRequest', + 'InspectContentResponse', + 'OutputStorageConfig', + 'InfoTypeStats', + 'InspectDataSourceDetails', + 'HybridInspectStatistics', + 'InfoTypeDescription', + 'InfoTypeCategory', + 'VersionDescription', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'RiskAnalysisJobConfig', + 'QuasiId', + 'StatisticalTable', + 'PrivacyMetric', + 'AnalyzeDataSourceRiskDetails', + 'ValueFrequency', + 'Value', + 'QuoteInfo', + 'DateTime', + 
'DeidentifyConfig', + 'ImageTransformations', + 'TransformationErrorHandling', + 'PrimitiveTransformation', + 'TimePartConfig', + 'CryptoHashConfig', + 'CryptoDeterministicConfig', + 'ReplaceValueConfig', + 'ReplaceDictionaryConfig', + 'ReplaceWithInfoTypeConfig', + 'RedactConfig', + 'CharsToIgnore', + 'CharacterMaskConfig', + 'FixedSizeBucketingConfig', + 'BucketingConfig', + 'CryptoReplaceFfxFpeConfig', + 'CryptoKey', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'KmsWrappedCryptoKey', + 'DateShiftConfig', + 'InfoTypeTransformations', + 'FieldTransformation', + 'RecordTransformations', + 'RecordSuppression', + 'RecordCondition', + 'TransformationOverview', + 'TransformationSummary', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationLocation', + 'RecordTransformation', + 'TransformationResultStatus', + 'TransformationDetailsStorageConfig', + 'Schedule', + 'Manual', + 'InspectTemplate', + 'DeidentifyTemplate', + 'Error', + 'JobTrigger', + 'Action', + 'TransformationConfig', + 'CreateInspectTemplateRequest', + 'UpdateInspectTemplateRequest', + 'GetInspectTemplateRequest', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'DeleteInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'ActivateJobTriggerRequest', + 'UpdateJobTriggerRequest', + 'GetJobTriggerRequest', + 'CreateDlpJobRequest', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'DeleteJobTriggerRequest', + 'InspectJobConfig', + 'DataProfileAction', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DlpJob', + 'GetDlpJobRequest', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'CancelDlpJobRequest', + 'FinishDlpJobRequest', + 'DeleteDlpJobRequest', + 'CreateDeidentifyTemplateRequest', + 'UpdateDeidentifyTemplateRequest', + 'GetDeidentifyTemplateRequest', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'DeleteDeidentifyTemplateRequest', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 
'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'StoredInfoType', + 'CreateStoredInfoTypeRequest', + 'UpdateStoredInfoTypeRequest', + 'GetStoredInfoTypeRequest', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'DeleteStoredInfoTypeRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectDlpJobRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectResponse', + 'DataRiskLevel', + 'DataProfileConfigSnapshot', + 'TableDataProfile', + 'ProfileStatus', + 'InfoTypeSummary', + 'OtherInfoTypeSummary', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + }, +) + + +class TransformationResultStatusType(proto.Enum): + r"""Enum of possible outcomes of transformations. SUCCESS if + transformation and storing of transformation was successful, + otherwise, reason for not transforming. + + Values: + STATE_TYPE_UNSPECIFIED (0): + No description available. + INVALID_TRANSFORM (1): + This will be set when a finding could not be + transformed (i.e. outside user set bucket + range). + BIGQUERY_MAX_ROW_SIZE_EXCEEDED (2): + This will be set when a BigQuery + transformation was successful but could not be + stored back in BigQuery because the transformed + row exceeds BigQuery's max row size. + METADATA_UNRETRIEVABLE (3): + This will be set when there is a finding in + the custom metadata of a file, but at the write + time of the transformed file, this key / value + pair is unretrievable. + SUCCESS (4): + This will be set when the transformation and + storing of it is successful. + """ + STATE_TYPE_UNSPECIFIED = 0 + INVALID_TRANSFORM = 1 + BIGQUERY_MAX_ROW_SIZE_EXCEEDED = 2 + METADATA_UNRETRIEVABLE = 3 + SUCCESS = 4 + + +class TransformationContainerType(proto.Enum): + r"""Describes functionality of a given container in its original + format. + + Values: + TRANSFORM_UNKNOWN_CONTAINER (0): + No description available. + TRANSFORM_BODY (1): + No description available. 
+ TRANSFORM_METADATA (2): + No description available. + TRANSFORM_TABLE (3): + No description available. + """ + TRANSFORM_UNKNOWN_CONTAINER = 0 + TRANSFORM_BODY = 1 + TRANSFORM_METADATA = 2 + TRANSFORM_TABLE = 3 + + +class TransformationType(proto.Enum): + r"""An enum of rules that can be used to transform a value. Can be a + record suppression, or one of the transformation rules specified + under ``PrimitiveTransformation``. + + Values: + TRANSFORMATION_TYPE_UNSPECIFIED (0): + Unused + RECORD_SUPPRESSION (1): + Record suppression + REPLACE_VALUE (2): + Replace value + REPLACE_DICTIONARY (15): + Replace value using a dictionary. + REDACT (3): + Redact + CHARACTER_MASK (4): + Character mask + CRYPTO_REPLACE_FFX_FPE (5): + FFX-FPE + FIXED_SIZE_BUCKETING (6): + Fixed size bucketing + BUCKETING (7): + Bucketing + REPLACE_WITH_INFO_TYPE (8): + Replace with info type + TIME_PART (9): + Time part + CRYPTO_HASH (10): + Crypto hash + DATE_SHIFT (12): + Date shift + CRYPTO_DETERMINISTIC_CONFIG (13): + Deterministic crypto + REDACT_IMAGE (14): + Redact image + """ + TRANSFORMATION_TYPE_UNSPECIFIED = 0 + RECORD_SUPPRESSION = 1 + REPLACE_VALUE = 2 + REPLACE_DICTIONARY = 15 + REDACT = 3 + CHARACTER_MASK = 4 + CRYPTO_REPLACE_FFX_FPE = 5 + FIXED_SIZE_BUCKETING = 6 + BUCKETING = 7 + REPLACE_WITH_INFO_TYPE = 8 + TIME_PART = 9 + CRYPTO_HASH = 10 + DATE_SHIFT = 12 + CRYPTO_DETERMINISTIC_CONFIG = 13 + REDACT_IMAGE = 14 + + +class RelationalOperator(proto.Enum): + r"""Operators available for comparing the value of fields. + + Values: + RELATIONAL_OPERATOR_UNSPECIFIED (0): + Unused + EQUAL_TO (1): + Equal. Attempts to match even with + incompatible types. + NOT_EQUAL_TO (2): + Not equal to. Attempts to match even with + incompatible types. + GREATER_THAN (3): + Greater than. + LESS_THAN (4): + Less than. + GREATER_THAN_OR_EQUALS (5): + Greater than or equals. + LESS_THAN_OR_EQUALS (6): + Less than or equals. 
+ EXISTS (7): + Exists + """ + RELATIONAL_OPERATOR_UNSPECIFIED = 0 + EQUAL_TO = 1 + NOT_EQUAL_TO = 2 + GREATER_THAN = 3 + LESS_THAN = 4 + GREATER_THAN_OR_EQUALS = 5 + LESS_THAN_OR_EQUALS = 6 + EXISTS = 7 + + +class MatchingType(proto.Enum): + r"""Type of the match which can be applied to different ways of + matching, like Dictionary, regular expression and intersecting + with findings of another info type. + + Values: + MATCHING_TYPE_UNSPECIFIED (0): + Invalid. + MATCHING_TYPE_FULL_MATCH (1): + Full match. + - Dictionary: join of Dictionary results matched + complete finding quote - Regex: all regex + matches fill a finding quote start to end - + Exclude info type: completely inside affecting + info types findings + MATCHING_TYPE_PARTIAL_MATCH (2): + Partial match. + - Dictionary: at least one of the tokens in the + finding matches - Regex: substring of the + finding matches + - Exclude info type: intersects with affecting + info types findings + MATCHING_TYPE_INVERSE_MATCH (3): + Inverse match. + - Dictionary: no tokens in the finding match the + dictionary - Regex: finding doesn't match the + regex + - Exclude info type: no intersection with + affecting info types findings + """ + MATCHING_TYPE_UNSPECIFIED = 0 + MATCHING_TYPE_FULL_MATCH = 1 + MATCHING_TYPE_PARTIAL_MATCH = 2 + MATCHING_TYPE_INVERSE_MATCH = 3 + + +class ContentOption(proto.Enum): + r"""Deprecated and unused. + + Values: + CONTENT_UNSPECIFIED (0): + Includes entire content of a file or a data + stream. + CONTENT_TEXT (1): + Text content within the data, excluding any + metadata. + CONTENT_IMAGE (2): + Images found in the data. + """ + CONTENT_UNSPECIFIED = 0 + CONTENT_TEXT = 1 + CONTENT_IMAGE = 2 + + +class MetadataType(proto.Enum): + r"""Type of metadata containing the finding. + + Values: + METADATATYPE_UNSPECIFIED (0): + Unused + STORAGE_METADATA (2): + General file metadata provided by Cloud + Storage. 
+ """ + METADATATYPE_UNSPECIFIED = 0 + STORAGE_METADATA = 2 + + +class InfoTypeSupportedBy(proto.Enum): + r"""Parts of the APIs which use certain infoTypes. + + Values: + ENUM_TYPE_UNSPECIFIED (0): + Unused. + INSPECT (1): + Supported by the inspect operations. + RISK_ANALYSIS (2): + Supported by the risk analysis operations. + """ + ENUM_TYPE_UNSPECIFIED = 0 + INSPECT = 1 + RISK_ANALYSIS = 2 + + +class DlpJobType(proto.Enum): + r"""An enum to represent the various types of DLP jobs. + + Values: + DLP_JOB_TYPE_UNSPECIFIED (0): + Defaults to INSPECT_JOB. + INSPECT_JOB (1): + The job inspected Google Cloud for sensitive + data. + RISK_ANALYSIS_JOB (2): + The job executed a Risk Analysis computation. + """ + DLP_JOB_TYPE_UNSPECIFIED = 0 + INSPECT_JOB = 1 + RISK_ANALYSIS_JOB = 2 + + +class StoredInfoTypeState(proto.Enum): + r"""State of a StoredInfoType version. + + Values: + STORED_INFO_TYPE_STATE_UNSPECIFIED (0): + Unused + PENDING (1): + StoredInfoType version is being created. + READY (2): + StoredInfoType version is ready for use. + FAILED (3): + StoredInfoType creation failed. All relevant error messages + are returned in the ``StoredInfoTypeVersion`` message. + INVALID (4): + StoredInfoType is no longer valid because artifacts stored + in user-controlled storage were modified. To fix an invalid + StoredInfoType, use the ``UpdateStoredInfoType`` method to + create a new version. + """ + STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 + PENDING = 1 + READY = 2 + FAILED = 3 + INVALID = 4 + + +class ResourceVisibility(proto.Enum): + r"""How broadly a resource has been shared. New items may be + added over time. A higher number means more restricted. + + Values: + RESOURCE_VISIBILITY_UNSPECIFIED (0): + Unused. + RESOURCE_VISIBILITY_PUBLIC (10): + Visible to any user. + RESOURCE_VISIBILITY_RESTRICTED (20): + Visible only to specific users. 
+ """ + RESOURCE_VISIBILITY_UNSPECIFIED = 0 + RESOURCE_VISIBILITY_PUBLIC = 10 + RESOURCE_VISIBILITY_RESTRICTED = 20 + + +class EncryptionStatus(proto.Enum): + r"""How a resource is encrypted. + + Values: + ENCRYPTION_STATUS_UNSPECIFIED (0): + Unused. + ENCRYPTION_GOOGLE_MANAGED (1): + Google manages server-side encryption keys on + your behalf. + ENCRYPTION_CUSTOMER_MANAGED (2): + Customer provides the key. + """ + ENCRYPTION_STATUS_UNSPECIFIED = 0 + ENCRYPTION_GOOGLE_MANAGED = 1 + ENCRYPTION_CUSTOMER_MANAGED = 2 + + +class ExcludeInfoTypes(proto.Message): + r"""List of excluded infoTypes. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoType list in ExclusionRule rule drops a finding when it + overlaps or contained within with a finding of an infoType + from this list. For example, for + ``InspectionRuleSet.info_types`` containing + "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` + with "EMAIL_ADDRESS" the phone number findings are dropped + if they overlap with EMAIL_ADDRESS finding. That leads to + "555-222-2222@example.org" to generate only a single + finding, namely email address. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + + +class ExcludeByHotword(proto.Message): + r"""The rule to exclude findings based on a hotword. For record + inspection of tables, column names are considered hotwords. An + example of this is to exclude a finding if a BigQuery column + matches a specific pattern. + + Attributes: + hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): + Range of characters within which the entire + hotword must reside. The total length of the + window cannot exceed 1000 characters. 
The + windowBefore property in proximity should be set + to 1 if the hotword needs to be included in a + column header. + """ + + hotword_regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CustomInfoType.Regex, + ) + proximity: storage.CustomInfoType.DetectionRule.Proximity = proto.Field( + proto.MESSAGE, + number=2, + message=storage.CustomInfoType.DetectionRule.Proximity, + ) + + +class ExclusionRule(proto.Message): + r"""The rule that specifies conditions when findings of infoTypes + specified in ``InspectionRuleSet`` are removed from results. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Dictionary which defines the rule. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression which defines the rule. + + This field is a member of `oneof`_ ``type``. + exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes): + Set of infoTypes for which findings would + affect this rule. + + This field is a member of `oneof`_ ``type``. + exclude_by_hotword (google.cloud.dlp_v2.types.ExcludeByHotword): + Drop if the hotword rule is contained in the + proximate context. For tabular data, the context + includes the column name. + + This field is a member of `oneof`_ ``type``. + matching_type (google.cloud.dlp_v2.types.MatchingType): + How the rule is applied, see MatchingType + documentation for details. 
+ """ + + dictionary: storage.CustomInfoType.Dictionary = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + exclude_info_types: 'ExcludeInfoTypes' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='ExcludeInfoTypes', + ) + exclude_by_hotword: 'ExcludeByHotword' = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message='ExcludeByHotword', + ) + matching_type: 'MatchingType' = proto.Field( + proto.ENUM, + number=4, + enum='MatchingType', + ) + + +class InspectionRule(proto.Message): + r"""A single inspection rule to be applied to infoTypes, specified in + ``InspectionRuleSet``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + + This field is a member of `oneof`_ ``type``. + exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): + Exclusion rule. + + This field is a member of `oneof`_ ``type``. + """ + + hotword_rule: storage.CustomInfoType.DetectionRule.HotwordRule = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.DetectionRule.HotwordRule, + ) + exclusion_rule: 'ExclusionRule' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='ExclusionRule', + ) + + +class InspectionRuleSet(proto.Message): + r"""Rule set for modifying a set of infoTypes to alter behavior + under certain circumstances, depending on the specific details + of the rules within the set. 
+ + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + List of infoTypes this rule set is applied + to. + rules (MutableSequence[google.cloud.dlp_v2.types.InspectionRule]): + Set of rules to be applied to infoTypes. The + rules are applied in order. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + rules: MutableSequence['InspectionRule'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='InspectionRule', + ) + + +class InspectConfig(proto.Message): + r"""Configuration description of the scanning process. When used with + redactContent only info_types and min_likelihood are currently used. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + Restricts what info_types to look for. The values must + correspond to InfoType values returned by ListInfoTypes or + listed at + https://cloud.google.com/dlp/docs/infotypes-reference. + + When no InfoTypes or CustomInfoTypes are specified in a + request, the system may automatically choose what detectors + to run. By default this may be all types, but may change + over time as detectors are updated. + + If you need precise control and predictability as to what + detectors are run you should specify specific InfoTypes + listed in the reference, otherwise a default list will be + used, which may change over time. + min_likelihood (google.cloud.dlp_v2.types.Likelihood): + Only returns findings equal or above this + threshold. The default is POSSIBLE. + See https://cloud.google.com/dlp/docs/likelihood + to learn more. + limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): + Configuration to control the number of findings returned. + This is not used for data profiling. + + When redacting sensitive data from images, finding limits + don't apply. They can cause unexpected or inconsistent + results, where only some data is redacted. 
Don't include + finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + include_quote (bool): + When true, a contextual quote from the data that triggered a + finding is included in the response; see + [Finding.quote][google.privacy.dlp.v2.Finding.quote]. This + is not used for data profiling. + exclude_info_types (bool): + When true, excludes type information of the + findings. This is not used for data profiling. + custom_info_types (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType]): + CustomInfoTypes provided by the user. See + https://cloud.google.com/dlp/docs/creating-custom-infotypes + to learn more. + content_options (MutableSequence[google.cloud.dlp_v2.types.ContentOption]): + Deprecated and unused. + rule_set (MutableSequence[google.cloud.dlp_v2.types.InspectionRuleSet]): + Set of rules to apply to the findings for + this InspectConfig. Exclusion rules, contained + in the set are executed in the end, other rules + are executed in the order they are specified for + each info type. + """ + + class FindingLimits(proto.Message): + r"""Configuration to control the number of findings returned for + inspection. This is not used for de-identification or data + profiling. + + When redacting sensitive data from images, finding limits don't + apply. They can cause unexpected or inconsistent results, where only + some data is redacted. Don't include finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + + Attributes: + max_findings_per_item (int): + Max number of findings that will be returned for each item + scanned. When set within ``InspectJobConfig``, the maximum + returned is 2000 regardless if this is set higher. When set + within ``InspectContentRequest``, this field is ignored. + max_findings_per_request (int): + Max number of findings that will be returned per + request/job. 
When set within ``InspectContentRequest``, the + maximum returned is 2000 regardless if this is set higher. + max_findings_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): + Configuration of findings limit given for + specified infoTypes. + """ + + class InfoTypeLimit(proto.Message): + r"""Max findings configuration per infoType, per content item or + long running DlpJob. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Type of information the findings limit applies to. Only one + limit per info_type should be provided. If InfoTypeLimit + does not have an info_type, the DLP API applies the limit + against all info_types that are found but not specified in + another InfoTypeLimit. + max_findings (int): + Max findings limit for the given infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + max_findings: int = proto.Field( + proto.INT32, + number=2, + ) + + max_findings_per_item: int = proto.Field( + proto.INT32, + number=1, + ) + max_findings_per_request: int = proto.Field( + proto.INT32, + number=2, + ) + max_findings_per_info_type: MutableSequence['InspectConfig.FindingLimits.InfoTypeLimit'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InspectConfig.FindingLimits.InfoTypeLimit', + ) + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + min_likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=2, + enum=storage.Likelihood, + ) + limits: FindingLimits = proto.Field( + proto.MESSAGE, + number=3, + message=FindingLimits, + ) + include_quote: bool = proto.Field( + proto.BOOL, + number=4, + ) + exclude_info_types: bool = proto.Field( + proto.BOOL, + number=5, + ) + custom_info_types: MutableSequence[storage.CustomInfoType] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=storage.CustomInfoType, + ) + 
content_options: MutableSequence['ContentOption'] = proto.RepeatedField( + proto.ENUM, + number=8, + enum='ContentOption', + ) + rule_set: MutableSequence['InspectionRuleSet'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='InspectionRuleSet', + ) + + +class ByteContentItem(proto.Message): + r"""Container for bytes to inspect or redact. + + Attributes: + type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): + The type of data stored in the bytes string. Default will be + TEXT_UTF8. + data (bytes): + Content data to inspect or redact. + """ + class BytesType(proto.Enum): + r"""The type of data being sent for inspection. To learn more, see + `Supported file + types `__. + + Values: + BYTES_TYPE_UNSPECIFIED (0): + Unused + IMAGE (6): + Any image type. + IMAGE_JPEG (1): + jpeg + IMAGE_BMP (2): + bmp + IMAGE_PNG (3): + png + IMAGE_SVG (4): + svg + TEXT_UTF8 (5): + plain text + WORD_DOCUMENT (7): + docx, docm, dotx, dotm + PDF (8): + pdf + POWERPOINT_DOCUMENT (9): + pptx, pptm, potx, potm, pot + EXCEL_DOCUMENT (10): + xlsx, xlsm, xltx, xltm + AVRO (11): + avro + CSV (12): + csv + TSV (13): + tsv + """ + BYTES_TYPE_UNSPECIFIED = 0 + IMAGE = 6 + IMAGE_JPEG = 1 + IMAGE_BMP = 2 + IMAGE_PNG = 3 + IMAGE_SVG = 4 + TEXT_UTF8 = 5 + WORD_DOCUMENT = 7 + PDF = 8 + POWERPOINT_DOCUMENT = 9 + EXCEL_DOCUMENT = 10 + AVRO = 11 + CSV = 12 + TSV = 13 + + type_: BytesType = proto.Field( + proto.ENUM, + number=1, + enum=BytesType, + ) + data: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class ContentItem(proto.Message): + r""" + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + String data to inspect or redact. 
+ + This field is a member of `oneof`_ ``data_item``. + table (google.cloud.dlp_v2.types.Table): + Structured content for inspection. See + https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table + to learn more. + + This field is a member of `oneof`_ ``data_item``. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + Content data to inspect or redact. Replaces ``type`` and + ``data``. + + This field is a member of `oneof`_ ``data_item``. + """ + + value: str = proto.Field( + proto.STRING, + number=3, + oneof='data_item', + ) + table: 'Table' = proto.Field( + proto.MESSAGE, + number=4, + oneof='data_item', + message='Table', + ) + byte_item: 'ByteContentItem' = proto.Field( + proto.MESSAGE, + number=5, + oneof='data_item', + message='ByteContentItem', + ) + + +class Table(proto.Message): + r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request + allowed. See + https://cloud.google.com/dlp/docs/inspecting-structured-text#inspecting_a_table + to learn more. + + Attributes: + headers (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Headers of the table. + rows (MutableSequence[google.cloud.dlp_v2.types.Table.Row]): + Rows of the table. + """ + + class Row(proto.Message): + r"""Values of the row. + + Attributes: + values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Individual cells. + """ + + values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + + headers: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + rows: MutableSequence[Row] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Row, + ) + + +class InspectResult(proto.Message): + r"""All the findings for a single scanned item. + + Attributes: + findings (MutableSequence[google.cloud.dlp_v2.types.Finding]): + List of findings for an item. 
+ findings_truncated (bool): + If true, then this item might have more + findings than were returned, and the findings + returned are an arbitrary subset of all + findings. The findings list might be truncated + because the input items were too large, or + because the server reached the maximum amount of + resources allowed for a single API call. For + best results, divide the input into smaller + batches. + """ + + findings: MutableSequence['Finding'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Finding', + ) + findings_truncated: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class Finding(proto.Message): + r"""Represents a piece of potentially sensitive content. + + Attributes: + name (str): + Resource name in format + projects/{project}/locations/{location}/findings/{finding} + Populated only when viewing persisted findings. + quote (str): + The content that was found. Even if the content is not + textual, it may be converted to a textual representation + here. Provided if ``include_quote`` is true and the finding + is less than or equal to 4096 bytes long. If the finding + exceeds 4096 bytes in length, the quote may be omitted. + info_type (google.cloud.dlp_v2.types.InfoType): + The type of content that might have been found. Provided if + ``excluded_types`` is false. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Confidence of how likely it is that the ``info_type`` is + correct. + location (google.cloud.dlp_v2.types.Location): + Where the content was found. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when finding was detected. + quote_info (google.cloud.dlp_v2.types.QuoteInfo): + Contains data parsed from quotes. Only populated if + include_quote was set to true and a supported infoType was + requested. Currently supported infoTypes: DATE, + DATE_OF_BIRTH and TIME. + resource_name (str): + The job that stored the finding. + trigger_name (str): + Job trigger name, if applicable, for this + finding. 
+ labels (MutableMapping[str, str]): + The labels associated with this ``Finding``. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + job_create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the job started that produced this + finding. + job_name (str): + The job that stored the finding. + finding_id (str): + The unique finding id. + """ + + name: str = proto.Field( + proto.STRING, + number=14, + ) + quote: str = proto.Field( + proto.STRING, + number=1, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=3, + enum=storage.Likelihood, + ) + location: 'Location' = proto.Field( + proto.MESSAGE, + number=4, + message='Location', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + quote_info: 'QuoteInfo' = proto.Field( + proto.MESSAGE, + number=7, + message='QuoteInfo', + ) + resource_name: str = proto.Field( + proto.STRING, + number=8, + ) + trigger_name: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + job_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + job_name: str = proto.Field( + proto.STRING, + number=13, + ) + finding_id: str = proto.Field( + proto.STRING, + number=15, + ) + + +class Location(proto.Message): + r"""Specifies the location of the finding. 
+ + Attributes: + byte_range (google.cloud.dlp_v2.types.Range): + Zero-based byte offsets delimiting the + finding. These are relative to the finding's + containing element. Note that when the content + is not textual, this references the UTF-8 + encoded textual representation of the content. + Omitted if content is an image. + codepoint_range (google.cloud.dlp_v2.types.Range): + Unicode character offsets delimiting the + finding. These are relative to the finding's + containing element. Provided when the content is + text. + content_locations (MutableSequence[google.cloud.dlp_v2.types.ContentLocation]): + List of nested objects pointing to the + precise location of the finding within the file + or record. + container (google.cloud.dlp_v2.types.Container): + Information about the container where this + finding occurred, if available. + """ + + byte_range: 'Range' = proto.Field( + proto.MESSAGE, + number=1, + message='Range', + ) + codepoint_range: 'Range' = proto.Field( + proto.MESSAGE, + number=2, + message='Range', + ) + content_locations: MutableSequence['ContentLocation'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ContentLocation', + ) + container: 'Container' = proto.Field( + proto.MESSAGE, + number=8, + message='Container', + ) + + +class ContentLocation(proto.Message): + r"""Precise location of the finding within a document, record, + image, or metadata container. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + container_name (str): + Name of the container where the finding is located. The top + level name is the source file name or table name. 
Names of + some common storage containers are formatted as follows: + + - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` + - Cloud Storage files: ``gs://{bucket}/{path}`` + - Datastore namespace: {namespace} + + Nested names could be absent if the embedded object has no + string identifier (for example, an image contained within a + document). + record_location (google.cloud.dlp_v2.types.RecordLocation): + Location within a row or record of a database + table. + + This field is a member of `oneof`_ ``location``. + image_location (google.cloud.dlp_v2.types.ImageLocation): + Location within an image's pixels. + + This field is a member of `oneof`_ ``location``. + document_location (google.cloud.dlp_v2.types.DocumentLocation): + Location data for document files. + + This field is a member of `oneof`_ ``location``. + metadata_location (google.cloud.dlp_v2.types.MetadataLocation): + Location within the metadata for inspected + content. + + This field is a member of `oneof`_ ``location``. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Finding container modification timestamp, if applicable. For + Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + container_version (str): + Finding container version, if available + ("generation" for Cloud Storage). 
+ """ + + container_name: str = proto.Field( + proto.STRING, + number=1, + ) + record_location: 'RecordLocation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location', + message='RecordLocation', + ) + image_location: 'ImageLocation' = proto.Field( + proto.MESSAGE, + number=3, + oneof='location', + message='ImageLocation', + ) + document_location: 'DocumentLocation' = proto.Field( + proto.MESSAGE, + number=5, + oneof='location', + message='DocumentLocation', + ) + metadata_location: 'MetadataLocation' = proto.Field( + proto.MESSAGE, + number=8, + oneof='location', + message='MetadataLocation', + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class MetadataLocation(proto.Message): + r"""Metadata Location + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.dlp_v2.types.MetadataType): + Type of metadata containing the finding. + storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): + Storage metadata. + + This field is a member of `oneof`_ ``label``. + """ + + type_: 'MetadataType' = proto.Field( + proto.ENUM, + number=1, + enum='MetadataType', + ) + storage_label: 'StorageMetadataLabel' = proto.Field( + proto.MESSAGE, + number=3, + oneof='label', + message='StorageMetadataLabel', + ) + + +class StorageMetadataLabel(proto.Message): + r"""Storage metadata label to indicate which metadata entry + contains findings. + + Attributes: + key (str): + + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DocumentLocation(proto.Message): + r"""Location of a finding within a document. + + Attributes: + file_offset (int): + Offset of the line, from the beginning of the + file, where the finding is located. 
+ """ + + file_offset: int = proto.Field( + proto.INT64, + number=1, + ) + + +class RecordLocation(proto.Message): + r"""Location of a finding within a row or record. + + Attributes: + record_key (google.cloud.dlp_v2.types.RecordKey): + Key of the finding. + field_id (google.cloud.dlp_v2.types.FieldId): + Field id of the field containing the finding. + table_location (google.cloud.dlp_v2.types.TableLocation): + Location within a ``ContentItem.Table``. + """ + + record_key: storage.RecordKey = proto.Field( + proto.MESSAGE, + number=1, + message=storage.RecordKey, + ) + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + table_location: 'TableLocation' = proto.Field( + proto.MESSAGE, + number=3, + message='TableLocation', + ) + + +class TableLocation(proto.Message): + r"""Location of a finding within a table. + + Attributes: + row_index (int): + The zero-based index of the row where the finding is + located. Only populated for resources that have a natural + ordering, not BigQuery. In BigQuery, to identify the row a + finding came from, populate + BigQueryOptions.identifying_fields with your primary key + column names and when you store the findings the value of + those columns will be stored inside of Finding. + """ + + row_index: int = proto.Field( + proto.INT64, + number=1, + ) + + +class Container(proto.Message): + r"""Represents a container that may contain DLP findings. + Examples of a container include a file, table, or database + record. + + Attributes: + type_ (str): + Container type, for example BigQuery or Cloud + Storage. + project_id (str): + Project where the finding was found. + Can be different from the project that owns the + finding. + full_path (str): + A string representation of the full container + name. Examples: + - BigQuery: 'Project:DataSetId.TableId' + - Cloud Storage: + 'gs://Bucket/folders/filename.txt' + root_path (str): + The root of the container. 
Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the root is ``dataset_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the root is + ``gs://bucket`` + relative_path (str): + The rest of the path after the root. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the relative path is ``table_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the relative path is + ``folder/filename.txt`` + update_time (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if applicable. + For Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + version (str): + Findings container version, if available + ("generation" for Cloud Storage). + """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + full_path: str = proto.Field( + proto.STRING, + number=3, + ) + root_path: str = proto.Field( + proto.STRING, + number=4, + ) + relative_path: str = proto.Field( + proto.STRING, + number=5, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class Range(proto.Message): + r"""Generic half-open interval [start, end) + + Attributes: + start (int): + Index of the first character of the range + (inclusive). + end (int): + Index of the last character of the range + (exclusive). + """ + + start: int = proto.Field( + proto.INT64, + number=1, + ) + end: int = proto.Field( + proto.INT64, + number=2, + ) + + +class ImageLocation(proto.Message): + r"""Location of the finding within an image. 
+ + Attributes: + bounding_boxes (MutableSequence[google.cloud.dlp_v2.types.BoundingBox]): + Bounding boxes locating the pixels within the + image containing the finding. + """ + + bounding_boxes: MutableSequence['BoundingBox'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BoundingBox', + ) + + +class BoundingBox(proto.Message): + r"""Bounding box encompassing detected text within an image. + + Attributes: + top (int): + Top coordinate of the bounding box. (0,0) is + upper left. + left (int): + Left coordinate of the bounding box. (0,0) is + upper left. + width (int): + Width of the bounding box in pixels. + height (int): + Height of the bounding box in pixels. + """ + + top: int = proto.Field( + proto.INT32, + number=1, + ) + left: int = proto.Field( + proto.INT32, + number=2, + ) + width: int = proto.Field( + proto.INT32, + number=3, + ) + height: int = proto.Field( + proto.INT32, + number=4, + ) + + +class RedactImageRequest(proto.Message): + r"""Request to search for potentially sensitive info in an image + and redact it by covering it with a colored rectangle. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + location_id (str): + Deprecated. This field has no effect. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. 
+ image_redaction_configs (MutableSequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): + The configuration for specifying what content + to redact from images. + include_findings (bool): + Whether the response should include findings + along with the redacted image. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + The content must be PNG, JPEG, SVG or BMP. + """ + + class ImageRedactionConfig(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Only one per info_type should be provided per request. If + not specified, and redact_all_text is false, the DLP API + will redact all text that it matches against all info_types + that are found, but not specified in another + ImageRedactionConfig. + + This field is a member of `oneof`_ ``target``. + redact_all_text (bool): + If true, all text found in the image, regardless whether it + matches an info_type, is redacted. Only one should be + provided. + + This field is a member of `oneof`_ ``target``. + redaction_color (google.cloud.dlp_v2.types.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. 
+ """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + oneof='target', + message=storage.InfoType, + ) + redact_all_text: bool = proto.Field( + proto.BOOL, + number=2, + oneof='target', + ) + redaction_color: 'Color' = proto.Field( + proto.MESSAGE, + number=3, + message='Color', + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + location_id: str = proto.Field( + proto.STRING, + number=8, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + image_redaction_configs: MutableSequence[ImageRedactionConfig] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=ImageRedactionConfig, + ) + include_findings: bool = proto.Field( + proto.BOOL, + number=6, + ) + byte_item: 'ByteContentItem' = proto.Field( + proto.MESSAGE, + number=7, + message='ByteContentItem', + ) + + +class Color(proto.Message): + r"""Represents a color in the RGB color space. + + Attributes: + red (float): + The amount of red in the color as a value in the interval + [0, 1]. + green (float): + The amount of green in the color as a value in the interval + [0, 1]. + blue (float): + The amount of blue in the color as a value in the interval + [0, 1]. + """ + + red: float = proto.Field( + proto.FLOAT, + number=1, + ) + green: float = proto.Field( + proto.FLOAT, + number=2, + ) + blue: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class RedactImageResponse(proto.Message): + r"""Results of redacting an image. + + Attributes: + redacted_image (bytes): + The redacted image. The type will be the same + as the original image. + extracted_text (str): + If an image was being inspected and the InspectConfig's + include_quote was set to true, then this field will include + all text, if any, that was found in the image. + inspect_result (google.cloud.dlp_v2.types.InspectResult): + The findings. Populated when include_findings in the request + is true. 
+ """ + + redacted_image: bytes = proto.Field( + proto.BYTES, + number=1, + ) + extracted_text: str = proto.Field( + proto.STRING, + number=2, + ) + inspect_result: 'InspectResult' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectResult', + ) + + +class DeidentifyContentRequest(proto.Message): + r"""Request to de-identify a ContentItem. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the de-identification of the content item. + Items specified here will override the template referenced + by the deidentify_template_name argument. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. Items specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to de-identify. Will be treated as text. + + This value must be of type + [Table][google.privacy.dlp.v2.Table] if your + [deidentify_config][google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config] + is a + [RecordTransformations][google.privacy.dlp.v2.RecordTransformations] + object. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. 
Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + deidentify_template_name (str): + Template to use. Any configuration directly specified in + deidentify_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyConfig', + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=4, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + deidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DeidentifyContentResponse(proto.Message): + r"""Results of de-identifying a ContentItem. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The de-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made on the ``item``. + """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + overview: 'TransformationOverview' = proto.Field( + proto.MESSAGE, + number=2, + message='TransformationOverview', + ) + + +class ReidentifyContentRequest(proto.Message): + r"""Request to re-identify an item. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the re-identification of the content item. + This field shares the same proto message type that is used + for de-identification, however its usage here is for the + reversal of the previous de-identification. + Re-identification is performed by examining the + transformations used to de-identify the items and executing + the reverse. This requires that only reversible + transformations be provided here. The reversible + transformations are: + + - ``CryptoDeterministicConfig`` + - ``CryptoReplaceFfxFpeConfig`` + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. + item (google.cloud.dlp_v2.types.ContentItem): + The item to re-identify. Will be treated as + text. + inspect_template_name (str): + Template to use. Any configuration directly specified in + ``inspect_config`` will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + reidentify_template_name (str): + Template to use. References an instance of + ``DeidentifyTemplate``. Any configuration directly specified + in ``reidentify_config`` or ``inspect_config`` will override + those set in the template. 
The ``DeidentifyTemplate`` used + must include only reversible transformations. Singular + fields that are set in this request will replace their + corresponding fields in the template. Repeated fields are + appended. Singular sub-messages and groups are recursively + merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + reidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyConfig', + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=4, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + reidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ReidentifyContentResponse(proto.Message): + r"""Results of re-identifying an item. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The re-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made to the ``item``. + """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + overview: 'TransformationOverview' = proto.Field( + proto.MESSAGE, + number=2, + message='TransformationOverview', + ) + + +class InspectContentRequest(proto.Message): + r"""Request to search for potentially sensitive info in a + ContentItem. + + Attributes: + parent (str): + Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. What specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class InspectContentResponse(proto.Message): + r"""Results of inspecting an item. + + Attributes: + result (google.cloud.dlp_v2.types.InspectResult): + The findings. 
+ """ + + result: 'InspectResult' = proto.Field( + proto.MESSAGE, + number=1, + message='InspectResult', + ) + + +class OutputStorageConfig(proto.Message): + r"""Cloud repository for storing output. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Store findings in an existing table or a new table in an + existing dataset. If table_id is not set a new one will be + generated for you with the following format: + dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific time zone + will be used for generating the date details. + + For Inspect, each column in an existing output table must + have the same name, type, and mode of a field in the + ``Finding`` object. + + For Risk, an existing output table should be the output of a + previous Risk analysis job run on the same source table, + with the same privacy metric and quasi-identifiers. Risk + jobs that analyze the same table but compute a different + privacy metric, or use different sets of quasi-identifiers, + cannot store their results in the same table. + + This field is a member of `oneof`_ ``type``. + output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): + Schema used for writing the findings for Inspect jobs. This + field is only used for Inspect and must be unspecified for + Risk jobs. Columns are derived from the ``Finding`` object. + If appending to an existing table, any columns from the + predefined schema that are missing will be added. No columns + in the existing table will be deleted. + + If unspecified, then all available columns will be used for + a new table or an (existing) table with no schema, and no + changes will be made to an existing table that has a schema. + Only for use with external storage. + """ + class OutputSchema(proto.Enum): + r"""Predefined schemas for storing findings. + Only for use with external storage. 
+ + Values: + OUTPUT_SCHEMA_UNSPECIFIED (0): + Unused. + BASIC_COLUMNS (1): + Basic schema including only ``info_type``, ``quote``, + ``certainty``, and ``timestamp``. + GCS_COLUMNS (2): + Schema tailored to findings from scanning + Cloud Storage. + DATASTORE_COLUMNS (3): + Schema tailored to findings from scanning + Google Datastore. + BIG_QUERY_COLUMNS (4): + Schema tailored to findings from scanning + Google BigQuery. + ALL_COLUMNS (5): + Schema containing all columns. + """ + OUTPUT_SCHEMA_UNSPECIFIED = 0 + BASIC_COLUMNS = 1 + GCS_COLUMNS = 2 + DATASTORE_COLUMNS = 3 + BIG_QUERY_COLUMNS = 4 + ALL_COLUMNS = 5 + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.BigQueryTable, + ) + output_schema: OutputSchema = proto.Field( + proto.ENUM, + number=3, + enum=OutputSchema, + ) + + +class InfoTypeStats(proto.Message): + r"""Statistics regarding a specific InfoType. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The type of finding this stat is for. + count (int): + Number of findings for this infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class InspectDataSourceDetails(proto.Message): + r"""The results of an inspect DataSource job. + + Attributes: + requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): + The configuration used for this job. + result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): + A summary of the outcome of this inspection + job. + """ + + class RequestedOptions(proto.Message): + r"""Snapshot of the inspection configuration. + + Attributes: + snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + If run with an InspectTemplate, a snapshot of + its state at the time of this run. + job_config (google.cloud.dlp_v2.types.InspectJobConfig): + Inspect config. 
+ """ + + snapshot_inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + job_config: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectJobConfig', + ) + + class Result(proto.Message): + r"""All result fields mentioned below are updated while the job + is processing. + + Attributes: + processed_bytes (int): + Total size in bytes that were processed. + total_estimated_bytes (int): + Estimate of the number of bytes to process. + info_type_stats (MutableSequence[google.cloud.dlp_v2.types.InfoTypeStats]): + Statistics of how many instances of each info + type were found during inspect job. + hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): + Statistics related to the processing of + hybrid inspect. + """ + + processed_bytes: int = proto.Field( + proto.INT64, + number=1, + ) + total_estimated_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + info_type_stats: MutableSequence['InfoTypeStats'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InfoTypeStats', + ) + hybrid_stats: 'HybridInspectStatistics' = proto.Field( + proto.MESSAGE, + number=7, + message='HybridInspectStatistics', + ) + + requested_options: RequestedOptions = proto.Field( + proto.MESSAGE, + number=2, + message=RequestedOptions, + ) + result: Result = proto.Field( + proto.MESSAGE, + number=3, + message=Result, + ) + + +class HybridInspectStatistics(proto.Message): + r"""Statistics related to processing hybrid inspect requests. + + Attributes: + processed_count (int): + The number of hybrid inspection requests + processed within this job. + aborted_count (int): + The number of hybrid inspection requests + aborted because the job ran out of quota or was + ended before they could be processed. + pending_count (int): + The number of hybrid requests currently being processed. + Only populated when called via method ``getDlpJob``. 
A burst + of traffic may cause hybrid inspect requests to be enqueued. + Processing will take place as quickly as possible, but + resource limitations may impact how long a request is + enqueued for. + """ + + processed_count: int = proto.Field( + proto.INT64, + number=1, + ) + aborted_count: int = proto.Field( + proto.INT64, + number=2, + ) + pending_count: int = proto.Field( + proto.INT64, + number=3, + ) + + +class InfoTypeDescription(proto.Message): + r"""InfoType description. + + Attributes: + name (str): + Internal name of the infoType. + display_name (str): + Human readable form of the infoType name. + supported_by (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): + Which parts of the API supports this + InfoType. + description (str): + Description of the infotype. Translated when + language is provided in the request. + versions (MutableSequence[google.cloud.dlp_v2.types.VersionDescription]): + A list of available versions for the + infotype. + categories (MutableSequence[google.cloud.dlp_v2.types.InfoTypeCategory]): + The category of the infoType. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + supported_by: MutableSequence['InfoTypeSupportedBy'] = proto.RepeatedField( + proto.ENUM, + number=3, + enum='InfoTypeSupportedBy', + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + versions: MutableSequence['VersionDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='VersionDescription', + ) + categories: MutableSequence['InfoTypeCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='InfoTypeCategory', + ) + + +class InfoTypeCategory(proto.Message): + r"""Classification of infoTypes to organize them according to + geographic location, industry, and data type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + location_category (google.cloud.dlp_v2.types.InfoTypeCategory.LocationCategory): + The region or country that issued the ID or + document represented by the infoType. + + This field is a member of `oneof`_ ``category``. + industry_category (google.cloud.dlp_v2.types.InfoTypeCategory.IndustryCategory): + The group of relevant businesses where this + infoType is commonly used + + This field is a member of `oneof`_ ``category``. + type_category (google.cloud.dlp_v2.types.InfoTypeCategory.TypeCategory): + The class of identifiers where this infoType + belongs + + This field is a member of `oneof`_ ``category``. + """ + class LocationCategory(proto.Enum): + r"""Enum of the current locations. + We might add more locations in the future. + + Values: + LOCATION_UNSPECIFIED (0): + Unused location + GLOBAL (1): + The infoType is not issued by or tied to a + specific region, but is used almost everywhere. + ARGENTINA (2): + The infoType is typically used in Argentina. + AUSTRALIA (3): + The infoType is typically used in Australia. + BELGIUM (4): + The infoType is typically used in Belgium. + BRAZIL (5): + The infoType is typically used in Brazil. + CANADA (6): + The infoType is typically used in Canada. + CHILE (7): + The infoType is typically used in Chile. + CHINA (8): + The infoType is typically used in China. + COLOMBIA (9): + The infoType is typically used in Colombia. + DENMARK (10): + The infoType is typically used in Denmark. + FRANCE (11): + The infoType is typically used in France. + FINLAND (12): + The infoType is typically used in Finland. + GERMANY (13): + The infoType is typically used in Germany. + HONG_KONG (14): + The infoType is typically used in Hong Kong. + INDIA (15): + The infoType is typically used in India. 
+ INDONESIA (16): + The infoType is typically used in Indonesia. + IRELAND (17): + The infoType is typically used in Ireland. + ISRAEL (18): + The infoType is typically used in Israel. + ITALY (19): + The infoType is typically used in Italy. + JAPAN (20): + The infoType is typically used in Japan. + KOREA (21): + The infoType is typically used in Korea. + MEXICO (22): + The infoType is typically used in Mexico. + THE_NETHERLANDS (23): + The infoType is typically used in the + Netherlands. + NORWAY (24): + The infoType is typically used in Norway. + PARAGUAY (25): + The infoType is typically used in Paraguay. + PERU (26): + The infoType is typically used in Peru. + POLAND (27): + The infoType is typically used in Poland. + PORTUGAL (28): + The infoType is typically used in Portugal. + SINGAPORE (29): + The infoType is typically used in Singapore. + SOUTH_AFRICA (30): + The infoType is typically used in South + Africa. + SPAIN (31): + The infoType is typically used in Spain. + SWEDEN (32): + The infoType is typically used in Sweden. + TAIWAN (33): + The infoType is typically used in Taiwan. + THAILAND (34): + The infoType is typically used in Thailand. + TURKEY (35): + The infoType is typically used in Turkey. + UNITED_KINGDOM (36): + The infoType is typically used in the United + Kingdom. + UNITED_STATES (37): + The infoType is typically used in the United + States. + URUGUAY (38): + The infoType is typically used in Uruguay. + VENEZUELA (39): + The infoType is typically used in Venezuela. + INTERNAL (40): + The infoType is typically used in Google + internally. + NEW_ZEALAND (41): + The infoType is typically used in New + Zealand. 
+ """ + LOCATION_UNSPECIFIED = 0 + GLOBAL = 1 + ARGENTINA = 2 + AUSTRALIA = 3 + BELGIUM = 4 + BRAZIL = 5 + CANADA = 6 + CHILE = 7 + CHINA = 8 + COLOMBIA = 9 + DENMARK = 10 + FRANCE = 11 + FINLAND = 12 + GERMANY = 13 + HONG_KONG = 14 + INDIA = 15 + INDONESIA = 16 + IRELAND = 17 + ISRAEL = 18 + ITALY = 19 + JAPAN = 20 + KOREA = 21 + MEXICO = 22 + THE_NETHERLANDS = 23 + NORWAY = 24 + PARAGUAY = 25 + PERU = 26 + POLAND = 27 + PORTUGAL = 28 + SINGAPORE = 29 + SOUTH_AFRICA = 30 + SPAIN = 31 + SWEDEN = 32 + TAIWAN = 33 + THAILAND = 34 + TURKEY = 35 + UNITED_KINGDOM = 36 + UNITED_STATES = 37 + URUGUAY = 38 + VENEZUELA = 39 + INTERNAL = 40 + NEW_ZEALAND = 41 + + class IndustryCategory(proto.Enum): + r"""Enum of the current industries in the category. + We might add more industries in the future. + + Values: + INDUSTRY_UNSPECIFIED (0): + Unused industry + FINANCE (1): + The infoType is typically used in the finance + industry. + HEALTH (2): + The infoType is typically used in the health + industry. + TELECOMMUNICATIONS (3): + The infoType is typically used in the + telecommunications industry. + """ + INDUSTRY_UNSPECIFIED = 0 + FINANCE = 1 + HEALTH = 2 + TELECOMMUNICATIONS = 3 + + class TypeCategory(proto.Enum): + r"""Enum of the current types in the category. + We might add more types in the future. + + Values: + TYPE_UNSPECIFIED (0): + Unused type + PII (1): + Personally identifiable information, for + example, a name or phone number + SPII (2): + Personally identifiable information that is + especially sensitive, for example, a passport + number. + DEMOGRAPHIC (3): + Attributes that can partially identify + someone, especially in combination with other + attributes, like age, height, and gender. + CREDENTIAL (4): + Confidential or secret information, for + example, a password. + GOVERNMENT_ID (5): + An identification document issued by a + government. + DOCUMENT (6): + A document, for example, a resume or source + code. 
+ CONTEXTUAL_INFORMATION (7): + Information that is not sensitive on its own, + but provides details about the circumstances + surrounding an entity or an event. + """ + TYPE_UNSPECIFIED = 0 + PII = 1 + SPII = 2 + DEMOGRAPHIC = 3 + CREDENTIAL = 4 + GOVERNMENT_ID = 5 + DOCUMENT = 6 + CONTEXTUAL_INFORMATION = 7 + + location_category: LocationCategory = proto.Field( + proto.ENUM, + number=1, + oneof='category', + enum=LocationCategory, + ) + industry_category: IndustryCategory = proto.Field( + proto.ENUM, + number=2, + oneof='category', + enum=IndustryCategory, + ) + type_category: TypeCategory = proto.Field( + proto.ENUM, + number=3, + oneof='category', + enum=TypeCategory, + ) + + +class VersionDescription(proto.Message): + r"""Details about each available version for an infotype. + + Attributes: + version (str): + Name of the version + description (str): + Description of the version. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListInfoTypesRequest(proto.Message): + r"""Request for the list of infoTypes. + + Attributes: + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + language_code (str): + BCP-47 language code for localized infoType + friendly names. If omitted, or if localized + strings are not available, en-US strings will be + returned. + filter (str): + filter to only return infoTypes supported by certain parts + of the API. Defaults to supported_by=INSPECT. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + language_code: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + location_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInfoTypesResponse(proto.Message): + r"""Response to the ListInfoTypes request. 
+ + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeDescription]): + Set of sensitive infoTypes. + """ + + info_types: MutableSequence['InfoTypeDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InfoTypeDescription', + ) + + +class RiskAnalysisJobConfig(proto.Message): + r"""Configuration for a risk analysis job. See + https://cloud.google.com/dlp/docs/concepts-risk-analysis to + learn more. + + Attributes: + privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): + Privacy metric to compute. + source_table (google.cloud.dlp_v2.types.BigQueryTable): + Input dataset to compute metrics over. + actions (MutableSequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. Are executed in the order provided. + """ + + privacy_metric: 'PrivacyMetric' = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + source_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + actions: MutableSequence['Action'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Action', + ) + + +class QuasiId(proto.Message): + r"""A column with a semantic tag attached. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. + info_type (google.cloud.dlp_v2.types.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. 
To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + + This field is a member of `oneof`_ ``tag``. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + + This field is a member of `oneof`_ ``tag``. + inferred (google.protobuf.empty_pb2.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + + This field is a member of `oneof`_ ``tag``. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + oneof='tag', + message=storage.InfoType, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=3, + oneof='tag', + ) + inferred: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=4, + oneof='tag', + message=empty_pb2.Empty, + ) + + +class StatisticalTable(proto.Message): + r"""An auxiliary table containing statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): + Required. Quasi-identifier columns. + relative_frequency (google.cloud.dlp_v2.types.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). 
Null values are assumed to be zero. + """ + + class QuasiIdentifierField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Identifies the column. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=2, + ) + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=3, + message=storage.BigQueryTable, + ) + quasi_ids: MutableSequence[QuasiIdentifierField] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=QuasiIdentifierField, + ) + relative_frequency: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + +class PrivacyMetric(proto.Message): + r"""Privacy metric to compute for reidentification risk analysis. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): + Numerical stats + + This field is a member of `oneof`_ ``type``. + categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig): + Categorical stats + + This field is a member of `oneof`_ ``type``. + k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig): + K-anonymity + + This field is a member of `oneof`_ ``type``. 
+
+        l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig):
+            l-diversity
+
+            This field is a member of `oneof`_ ``type``.
+        k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig):
+            k-map
+
+            This field is a member of `oneof`_ ``type``.
+        delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig):
+            delta-presence
+
+            This field is a member of `oneof`_ ``type``.
+    """
+
+    class NumericalStatsConfig(proto.Message):
+        r"""Compute numerical stats over an individual column, including
+        min, max, and quantiles.
+
+        Attributes:
+            field (google.cloud.dlp_v2.types.FieldId):
+                Field to compute numerical stats on.
+                Supported types are integer, float, date,
+                datetime, timestamp, time.
+        """
+
+        field: storage.FieldId = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+
+    class CategoricalStatsConfig(proto.Message):
+        r"""Compute categorical stats over an individual column, including
+        number of distinct values and value count distribution.
+
+        Attributes:
+            field (google.cloud.dlp_v2.types.FieldId):
+                Field to compute categorical stats on. All
+                column types are supported except for arrays and
+                structs. However, it may be more informative to
+                use NumericalStats when the field type is
+                supported, depending on the data.
+        """
+
+        field: storage.FieldId = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+
+    class KAnonymityConfig(proto.Message):
+        r"""k-anonymity metric, used for analysis of reidentification
+        risk.
+
+        Attributes:
+            quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]):
+                Set of fields to compute k-anonymity over.
+                When multiple fields are specified, they are
+                considered a single composite key. Structs and
+                repeated data types are not supported; however,
+                nested fields are supported so long as they are
+                not structs themselves or nested within a
+                repeated field.
+ entity_id (google.cloud.dlp_v2.types.EntityId): + Message indicating that multiple rows might be associated to + a single individual. If the same entity_id is associated to + multiple quasi-identifier tuples over distinct rows, we + consider the entire collection of tuples as the composite + quasi-identifier. This collection is a multiset: the order + in which the different tuples appear in the dataset is + ignored, but their frequency is taken into account. + + Important note: a maximum of 1000 rows can be associated to + a single entity ID. If more rows are associated with the + same entity ID, some might be ignored. + """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + entity_id: storage.EntityId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.EntityId, + ) + + class LDiversityConfig(proto.Message): + r"""l-diversity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Set of quasi-identifiers indicating how + equivalence classes are defined for the + l-diversity computation. When multiple fields + are specified, they are considered a single + composite key. + sensitive_attribute (google.cloud.dlp_v2.types.FieldId): + Sensitive field for computing the l-value. + """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + sensitive_attribute: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + class KMapEstimationConfig(proto.Message): + r"""Reidentifiability metric. This corresponds to a risk model + similar to what is called "journalist risk" in the literature, + except the attack dataset is statistically modeled instead of + being perfectly known. 
This can be done using publicly available + data (like the US Census), or using a custom statistical model + (indicated as one or several BigQuery tables), or by + extrapolating from the distribution of values in the input + dataset. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): + Required. Fields considered to be + quasi-identifiers. No two columns can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. + auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers column must + appear in exactly one column of one auxiliary table. + """ + + class TaggedField(proto.Message): + r"""A column with a semantic tag attached. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. + info_type (google.cloud.dlp_v2.types.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + + This field is a member of `oneof`_ ``tag``. + custom_tag (str): + A column can be tagged with a custom tag. 
In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + + This field is a member of `oneof`_ ``tag``. + inferred (google.protobuf.empty_pb2.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + + This field is a member of `oneof`_ ``tag``. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + oneof='tag', + message=storage.InfoType, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=3, + oneof='tag', + ) + inferred: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=4, + oneof='tag', + message=empty_pb2.Empty, + ) + + class AuxiliaryTable(proto.Message): + r"""An auxiliary table contains statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): + Required. Quasi-identifier columns. + relative_frequency (google.cloud.dlp_v2.types.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). Null values are assumed to be zero. + """ + + class QuasiIdField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. 
+ + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Identifies the column. + custom_tag (str): + A auxiliary field. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=2, + ) + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=3, + message=storage.BigQueryTable, + ) + quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField', + ) + relative_frequency: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.TaggedField'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PrivacyMetric.KMapEstimationConfig.TaggedField', + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable', + ) + + class DeltaPresenceEstimationConfig(proto.Message): + r"""δ-presence metric, used to estimate how likely it is for an + attacker to figure out that one given individual appears in a + de-identified dataset. Similarly to the k-map metric, we cannot + compute δ-presence exactly without knowing the attack dataset, + so we use a statistical model instead. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.QuasiId]): + Required. Fields considered to be + quasi-identifiers. No two fields can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. 
+ auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers field must appear + in exactly one field of one auxiliary table. + """ + + quasi_ids: MutableSequence['QuasiId'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='QuasiId', + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence['StatisticalTable'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StatisticalTable', + ) + + numerical_stats_config: NumericalStatsConfig = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=NumericalStatsConfig, + ) + categorical_stats_config: CategoricalStatsConfig = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=CategoricalStatsConfig, + ) + k_anonymity_config: KAnonymityConfig = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message=KAnonymityConfig, + ) + l_diversity_config: LDiversityConfig = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=LDiversityConfig, + ) + k_map_estimation_config: KMapEstimationConfig = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=KMapEstimationConfig, + ) + delta_presence_estimation_config: DeltaPresenceEstimationConfig = proto.Field( + proto.MESSAGE, + number=6, + oneof='type', + message=DeltaPresenceEstimationConfig, + ) + + +class AnalyzeDataSourceRiskDetails(proto.Message): + r"""Result of a risk analysis operation request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric):
+            Privacy metric to compute.
+        requested_source_table (google.cloud.dlp_v2.types.BigQueryTable):
+            Input dataset to compute metrics over.
+        numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult):
+            Numerical stats result
+
+            This field is a member of `oneof`_ ``result``.
+        categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult):
+            Categorical stats result
+
+            This field is a member of `oneof`_ ``result``.
+        k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult):
+            K-anonymity result
+
+            This field is a member of `oneof`_ ``result``.
+        l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult):
+            L-diversity result
+
+            This field is a member of `oneof`_ ``result``.
+        k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult):
+            K-map result
+
+            This field is a member of `oneof`_ ``result``.
+        delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult):
+            Delta-presence result
+
+            This field is a member of `oneof`_ ``result``.
+        requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions):
+            The configuration used for this job.
+    """
+
+    class NumericalStatsResult(proto.Message):
+        r"""Result of the numerical stats computation.
+
+        Attributes:
+            min_value (google.cloud.dlp_v2.types.Value):
+                Minimum value appearing in the column.
+            max_value (google.cloud.dlp_v2.types.Value):
+                Maximum value appearing in the column.
+            quantile_values (MutableSequence[google.cloud.dlp_v2.types.Value]):
+                List of 99 values that partition the set of
+                field values into 100 equal sized buckets.
+ """ + + min_value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + max_value: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + quantile_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Value', + ) + + class CategoricalStatsResult(proto.Message): + r"""Result of the categorical stats computation. + + Attributes: + value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): + Histogram of value frequencies in the column. + """ + + class CategoricalStatsHistogramBucket(proto.Message): + r"""Histogram of value frequencies in the column. + + Attributes: + value_frequency_lower_bound (int): + Lower bound on the value frequency of the + values in this bucket. + value_frequency_upper_bound (int): + Upper bound on the value frequency of the + values in this bucket. + bucket_size (int): + Total number of values in this bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): + Sample of value frequencies in this bucket. + The total number of values returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct values in this + bucket. 
+ """ + + value_frequency_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + value_frequency_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ValueFrequency', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket', + ) + + class KAnonymityResult(proto.Message): + r"""Result of the k-anonymity computation. + + Attributes: + equivalence_class_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): + Histogram of k-anonymity equivalence classes. + """ + + class KAnonymityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Set of values defining the equivalence class. + One value per quasi-identifier column in the + original KAnonymity metric message. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the equivalence class, for example + number of rows with the above set of values. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + equivalence_class_size: int = proto.Field( + proto.INT64, + number=2, + ) + + class KAnonymityHistogramBucket(proto.Message): + r"""Histogram of k-anonymity equivalence classes. 
+ + Attributes: + equivalence_class_size_lower_bound (int): + Lower bound on the size of the equivalence + classes in this bucket. + equivalence_class_size_upper_bound (int): + Upper bound on the size of the equivalence + classes in this bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. + """ + + equivalence_class_size_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + equivalence_class_size_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + equivalence_class_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', + ) + + class LDiversityResult(proto.Message): + r"""Result of the l-diversity computation. + + Attributes: + sensitive_value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): + Histogram of l-diversity equivalence class + sensitive value frequencies. + """ + + class LDiversityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value. 
+ + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Quasi-identifier values defining the + k-anonymity equivalence class. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the k-anonymity equivalence class. + num_distinct_sensitive_values (int): + Number of distinct sensitive values in this + equivalence class. + top_sensitive_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): + Estimated frequencies of top sensitive + values. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + equivalence_class_size: int = proto.Field( + proto.INT64, + number=2, + ) + num_distinct_sensitive_values: int = proto.Field( + proto.INT64, + number=3, + ) + top_sensitive_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ValueFrequency', + ) + + class LDiversityHistogramBucket(proto.Message): + r"""Histogram of l-diversity equivalence class sensitive value + frequencies. + + Attributes: + sensitive_value_frequency_lower_bound (int): + Lower bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + sensitive_value_frequency_upper_bound (int): + Upper bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. 
+ """ + + sensitive_value_frequency_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + sensitive_value_frequency_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + sensitive_value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', + ) + + class KMapEstimationResult(proto.Message): + r"""Result of the reidentifiability analysis. Note that these + results are an estimation, not exact values. + + Attributes: + k_map_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): + The intervals [min_anonymity, max_anonymity] do not overlap. + If a value doesn't correspond to any such interval, the + associated frequency is zero. For example, the following + records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} + {min_anonymity: 2, max_anonymity: 3, frequency: 42} + {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean + that there are no record with an estimated anonymity of 4, + 5, or larger than 10. + """ + + class KMapEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + The quasi-identifier values. + estimated_anonymity (int): + The estimated anonymity for these + quasi-identifier values. 
+ """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_anonymity: int = proto.Field( + proto.INT64, + number=2, + ) + + class KMapEstimationHistogramBucket(proto.Message): + r"""A KMapEstimationHistogramBucket message with the following values: + min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are + 42 records whose quasi-identifier values correspond to 3, 4 or 5 + people in the overlying population. An important particular case is + when min_anonymity = max_anonymity = 1: the frequency field then + corresponds to the number of uniquely identifiable records. + + Attributes: + min_anonymity (int): + Always positive. + max_anonymity (int): + Always greater than or equal to min_anonymity. + bucket_size (int): + Number of records within these anonymity + bounds. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_anonymity: int = proto.Field( + proto.INT64, + number=1, + ) + max_anonymity: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=5, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=7, + ) + + k_map_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket', + ) + + class DeltaPresenceEstimationResult(proto.Message): + r"""Result of the δ-presence computation. Note that these results + are an estimation, not exact values. + + Attributes: + delta_presence_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): + The intervals [min_probability, max_probability) do not + overlap. If a value doesn't correspond to any such interval, + the associated frequency is zero. For example, the following + records: {min_probability: 0, max_probability: 0.1, + frequency: 17} {min_probability: 0.2, max_probability: 0.3, + frequency: 42} {min_probability: 0.3, max_probability: 0.4, + frequency: 99} mean that there are no record with an + estimated probability in [0.1, 0.2) nor larger or equal to + 0.4. + """ + + class DeltaPresenceEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + The quasi-identifier values. 
+ estimated_probability (float): + The estimated probability that a given individual sharing + these quasi-identifier values is in the dataset. This value, + typically called δ, is the ratio between the number of + records in the dataset with these quasi-identifier values, + and the total number of individuals (inside *and* outside + the dataset) with these quasi-identifier values. For + example, if there are 15 individuals in the dataset who + share the same quasi-identifier values, and an estimated 100 + people in the entire population with these values, then δ is + 0.15. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_probability: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + class DeltaPresenceEstimationHistogramBucket(proto.Message): + r"""A DeltaPresenceEstimationHistogramBucket message with the following + values: min_probability: 0.1 max_probability: 0.2 frequency: 42 + means that there are 42 records for which δ is in [0.1, 0.2). An + important particular case is when min_probability = max_probability + = 1: then, every individual who shares this quasi-identifier + combination is in the dataset. + + Attributes: + min_probability (float): + Between 0 and 1. + max_probability (float): + Always greater than or equal to min_probability. + bucket_size (int): + Number of records within these probability + bounds. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_probability: float = proto.Field( + proto.DOUBLE, + number=1, + ) + max_probability: float = proto.Field( + proto.DOUBLE, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=5, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=7, + ) + + delta_presence_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', + ) + + class RequestedRiskAnalysisOptions(proto.Message): + r"""Risk analysis options. + + Attributes: + job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + The job config for the risk job. 
+ """ + + job_config: 'RiskAnalysisJobConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='RiskAnalysisJobConfig', + ) + + requested_privacy_metric: 'PrivacyMetric' = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + requested_source_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + numerical_stats_result: NumericalStatsResult = proto.Field( + proto.MESSAGE, + number=3, + oneof='result', + message=NumericalStatsResult, + ) + categorical_stats_result: CategoricalStatsResult = proto.Field( + proto.MESSAGE, + number=4, + oneof='result', + message=CategoricalStatsResult, + ) + k_anonymity_result: KAnonymityResult = proto.Field( + proto.MESSAGE, + number=5, + oneof='result', + message=KAnonymityResult, + ) + l_diversity_result: LDiversityResult = proto.Field( + proto.MESSAGE, + number=6, + oneof='result', + message=LDiversityResult, + ) + k_map_estimation_result: KMapEstimationResult = proto.Field( + proto.MESSAGE, + number=7, + oneof='result', + message=KMapEstimationResult, + ) + delta_presence_estimation_result: DeltaPresenceEstimationResult = proto.Field( + proto.MESSAGE, + number=9, + oneof='result', + message=DeltaPresenceEstimationResult, + ) + requested_options: RequestedRiskAnalysisOptions = proto.Field( + proto.MESSAGE, + number=10, + message=RequestedRiskAnalysisOptions, + ) + + +class ValueFrequency(proto.Message): + r"""A value of a field, including its frequency. + + Attributes: + value (google.cloud.dlp_v2.types.Value): + A value contained in the field in question. + count (int): + How many times the value is contained in the + field. + """ + + value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class Value(proto.Message): + r"""Set of primitive values supported by the system. 
Note that for the + purposes of inspection or transformation, the number of bytes + considered to comprise a 'Value' is based on its representation as a + UTF-8 encoded string. For example, if 'integer_value' is set to + 123456789, the number of bytes would be counted as 9, even though an + int64 only holds up to 8 bytes of data. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + integer_value (int): + integer + + This field is a member of `oneof`_ ``type``. + float_value (float): + float + + This field is a member of `oneof`_ ``type``. + string_value (str): + string + + This field is a member of `oneof`_ ``type``. + boolean_value (bool): + boolean + + This field is a member of `oneof`_ ``type``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + timestamp + + This field is a member of `oneof`_ ``type``. + time_value (google.type.timeofday_pb2.TimeOfDay): + time of day + + This field is a member of `oneof`_ ``type``. + date_value (google.type.date_pb2.Date): + date + + This field is a member of `oneof`_ ``type``. + day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): + day of week + + This field is a member of `oneof`_ ``type``. 
+ """ + + integer_value: int = proto.Field( + proto.INT64, + number=1, + oneof='type', + ) + float_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof='type', + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof='type', + ) + boolean_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof='type', + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=timestamp_pb2.Timestamp, + ) + time_value: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=6, + oneof='type', + message=timeofday_pb2.TimeOfDay, + ) + date_value: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=7, + oneof='type', + message=date_pb2.Date, + ) + day_of_week_value: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=8, + oneof='type', + enum=dayofweek_pb2.DayOfWeek, + ) + + +class QuoteInfo(proto.Message): + r"""Message for infoType-dependent details parsed from quote. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + date_time (google.cloud.dlp_v2.types.DateTime): + The date time indicated by the quote. + + This field is a member of `oneof`_ ``parsed_quote``. + """ + + date_time: 'DateTime' = proto.Field( + proto.MESSAGE, + number=2, + oneof='parsed_quote', + message='DateTime', + ) + + +class DateTime(proto.Message): + r"""Message for a date time object. + e.g. 2018-01-01, 5th August. + + Attributes: + date (google.type.date_pb2.Date): + One or more of the following must be set. + Must be a valid date or time value. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Day of week + time (google.type.timeofday_pb2.TimeOfDay): + Time of day + time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): + Time zone + """ + + class TimeZone(proto.Message): + r"""Time zone of the date time object. + + Attributes: + offset_minutes (int): + Set only if the offset can be determined. 
+ Positive for time ahead of UTC. E.g. For + "UTC-9", this value is -540. + """ + + offset_minutes: int = proto.Field( + proto.INT32, + number=1, + ) + + date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + time_zone: TimeZone = proto.Field( + proto.MESSAGE, + number=4, + message=TimeZone, + ) + + +class DeidentifyConfig(proto.Message): + r"""The configuration that controls how the data will change. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the dataset as free-form text and apply + the same free text transformation everywhere. + + This field is a member of `oneof`_ ``transformation``. + record_transformations (google.cloud.dlp_v2.types.RecordTransformations): + Treat the dataset as structured. + Transformations can be applied to specific + locations within structured datasets, such as + transforming a column within a table. + + This field is a member of `oneof`_ ``transformation``. + image_transformations (google.cloud.dlp_v2.types.ImageTransformations): + Treat the dataset as an image and redact. + + This field is a member of `oneof`_ ``transformation``. + transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): + Mode for handling transformation errors. If left + unspecified, the default mode is + ``TransformationErrorHandling.ThrowError``. 
+ """ + + info_type_transformations: 'InfoTypeTransformations' = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='InfoTypeTransformations', + ) + record_transformations: 'RecordTransformations' = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RecordTransformations', + ) + image_transformations: 'ImageTransformations' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='ImageTransformations', + ) + transformation_error_handling: 'TransformationErrorHandling' = proto.Field( + proto.MESSAGE, + number=3, + message='TransformationErrorHandling', + ) + + +class ImageTransformations(proto.Message): + r"""A type of transformation that is applied over images. + + Attributes: + transforms (MutableSequence[google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation]): + + """ + + class ImageTransformation(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + selected_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.SelectedInfoTypes): + Apply transformation to the selected info_types. + + This field is a member of `oneof`_ ``target``. + all_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllInfoTypes): + Apply transformation to all findings not specified in other + ImageTransformation's selected_info_types. Only one instance + is allowed within the ImageTransformations message. + + This field is a member of `oneof`_ ``target``. 
+ all_text (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllText): + Apply transformation to all text that doesn't + match an infoType. Only one instance is allowed + within the ImageTransformations message. + + This field is a member of `oneof`_ ``target``. + redaction_color (google.cloud.dlp_v2.types.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. + """ + + class SelectedInfoTypes(proto.Message): + r"""Apply transformation to the selected info_types. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + Required. InfoTypes to apply the + transformation to. Required. Provided InfoType + must be unique within the ImageTransformations + message. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=storage.InfoType, + ) + + class AllInfoTypes(proto.Message): + r"""Apply transformation to all findings. + """ + + class AllText(proto.Message): + r"""Apply to all text. 
+ """ + + selected_info_types: 'ImageTransformations.ImageTransformation.SelectedInfoTypes' = proto.Field( + proto.MESSAGE, + number=4, + oneof='target', + message='ImageTransformations.ImageTransformation.SelectedInfoTypes', + ) + all_info_types: 'ImageTransformations.ImageTransformation.AllInfoTypes' = proto.Field( + proto.MESSAGE, + number=5, + oneof='target', + message='ImageTransformations.ImageTransformation.AllInfoTypes', + ) + all_text: 'ImageTransformations.ImageTransformation.AllText' = proto.Field( + proto.MESSAGE, + number=6, + oneof='target', + message='ImageTransformations.ImageTransformation.AllText', + ) + redaction_color: 'Color' = proto.Field( + proto.MESSAGE, + number=3, + message='Color', + ) + + transforms: MutableSequence[ImageTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=ImageTransformation, + ) + + +class TransformationErrorHandling(proto.Message): + r"""How to handle transformation errors during de-identification. A + transformation error occurs when the requested transformation is + incompatible with the data. For example, trying to de-identify an IP + address using a ``DateShift`` transformation would result in a + transformation error, since date info cannot be extracted from an IP + address. Information about any incompatible transformations, and how + they were handled, is returned in the response as part of the + ``TransformationOverviews``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): + Throw an error + + This field is a member of `oneof`_ ``mode``. 
+        leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed):
+            Ignore errors
+
+            This field is a member of `oneof`_ ``mode``.
+    """
+
+    class ThrowError(proto.Message):
+        r"""Throw an error and fail the request when a transformation
+        error occurs.
+
+        """
+
+    class LeaveUntransformed(proto.Message):
+        r"""Skips the data without modifying it if the requested transformation
+        would cause an error. For example, if a ``DateShift`` transformation
+        were applied to an IP address, this mode would leave the IP address
+        unchanged in the response.
+
+        """
+
+    throw_error: ThrowError = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof='mode',
+        message=ThrowError,
+    )
+    leave_untransformed: LeaveUntransformed = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='mode',
+        message=LeaveUntransformed,
+    )
+
+
+class PrimitiveTransformation(proto.Message):
+    r"""A rule for transforming a value.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig):
+            Replace with a specified value.
+
+            This field is a member of `oneof`_ ``transformation``.
+        redact_config (google.cloud.dlp_v2.types.RedactConfig):
+            Redact
+
+            This field is a member of `oneof`_ ``transformation``.
+        character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig):
+            Mask
+
+            This field is a member of `oneof`_ ``transformation``.
+        crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig):
+            Ffx-Fpe
+
+            This field is a member of `oneof`_ ``transformation``.
+ fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): + Fixed size bucketing + + This field is a member of `oneof`_ ``transformation``. + bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): + Bucketing + + This field is a member of `oneof`_ ``transformation``. + replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): + Replace with infotype + + This field is a member of `oneof`_ ``transformation``. + time_part_config (google.cloud.dlp_v2.types.TimePartConfig): + Time extraction + + This field is a member of `oneof`_ ``transformation``. + crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): + Crypto + + This field is a member of `oneof`_ ``transformation``. + date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): + Date Shift + + This field is a member of `oneof`_ ``transformation``. + crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): + Deterministic Crypto + + This field is a member of `oneof`_ ``transformation``. + replace_dictionary_config (google.cloud.dlp_v2.types.ReplaceDictionaryConfig): + Replace with a value randomly drawn (with + replacement) from a dictionary. + + This field is a member of `oneof`_ ``transformation``. 
+ """ + + replace_config: 'ReplaceValueConfig' = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='ReplaceValueConfig', + ) + redact_config: 'RedactConfig' = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RedactConfig', + ) + character_mask_config: 'CharacterMaskConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='transformation', + message='CharacterMaskConfig', + ) + crypto_replace_ffx_fpe_config: 'CryptoReplaceFfxFpeConfig' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='CryptoReplaceFfxFpeConfig', + ) + fixed_size_bucketing_config: 'FixedSizeBucketingConfig' = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='FixedSizeBucketingConfig', + ) + bucketing_config: 'BucketingConfig' = proto.Field( + proto.MESSAGE, + number=6, + oneof='transformation', + message='BucketingConfig', + ) + replace_with_info_type_config: 'ReplaceWithInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=7, + oneof='transformation', + message='ReplaceWithInfoTypeConfig', + ) + time_part_config: 'TimePartConfig' = proto.Field( + proto.MESSAGE, + number=8, + oneof='transformation', + message='TimePartConfig', + ) + crypto_hash_config: 'CryptoHashConfig' = proto.Field( + proto.MESSAGE, + number=9, + oneof='transformation', + message='CryptoHashConfig', + ) + date_shift_config: 'DateShiftConfig' = proto.Field( + proto.MESSAGE, + number=11, + oneof='transformation', + message='DateShiftConfig', + ) + crypto_deterministic_config: 'CryptoDeterministicConfig' = proto.Field( + proto.MESSAGE, + number=12, + oneof='transformation', + message='CryptoDeterministicConfig', + ) + replace_dictionary_config: 'ReplaceDictionaryConfig' = proto.Field( + proto.MESSAGE, + number=13, + oneof='transformation', + message='ReplaceDictionaryConfig', + ) + + +class TimePartConfig(proto.Message): + r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or + preserve a 
portion of the value. + + Attributes: + part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): + The part of the time to keep. + """ + class TimePart(proto.Enum): + r"""Components that make up time. + + Values: + TIME_PART_UNSPECIFIED (0): + Unused + YEAR (1): + [0-9999] + MONTH (2): + [1-12] + DAY_OF_MONTH (3): + [1-31] + DAY_OF_WEEK (4): + [1-7] + WEEK_OF_YEAR (5): + [1-53] + HOUR_OF_DAY (6): + [0-23] + """ + TIME_PART_UNSPECIFIED = 0 + YEAR = 1 + MONTH = 2 + DAY_OF_MONTH = 3 + DAY_OF_WEEK = 4 + WEEK_OF_YEAR = 5 + HOUR_OF_DAY = 6 + + part_to_extract: TimePart = proto.Field( + proto.ENUM, + number=1, + enum=TimePart, + ) + + +class CryptoHashConfig(proto.Message): + r"""Pseudonymization method that generates surrogates via + cryptographic hashing. Uses SHA-256. + The key size must be either 32 or 64 bytes. + Outputs a base64 encoded representation of the hashed output + (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). + Currently, only string and integer values can be hashed. See + https://cloud.google.com/dlp/docs/pseudonymization to learn + more. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the hash function. + """ + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + + +class CryptoDeterministicConfig(proto.Message): + r"""Pseudonymization method that generates deterministic + encryption for the given input. Outputs a base64 encoded + representation of the encrypted output. Uses AES-SIV based on + the RFC https://tools.ietf.org/html/rfc5297. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the encryption function. For + deterministic encryption using AES-SIV, the + provided key is internally expanded to 64 bytes + prior to use. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom info type to annotate the surrogate with. 
This + annotation will be applied to the surrogate by prefixing it + with the name of the custom info type followed by the number + of characters comprising the surrogate. The following scheme + defines the format: {info type name}({surrogate character + count}):{surrogate} + + For example, if the name of custom info type is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom info type 'Surrogate'. This + facilitates reversal of the surrogate when it occurs in free + text. + + Note: For record transformations where the entire cell in a + table is being transformed, surrogates are not mandatory. + Surrogates are used to denote the location of the token and + are necessary for re-identification in free form text. + + In order for inspection to work properly, the name of this + info type must not occur naturally anywhere in your data; + otherwise, inspection may either + + - reverse a surrogate that does not correspond to an actual + identifier + - be unable to parse the surrogate and result in an error + + Therefore, choose your custom info type name carefully after + considering what your data looks like. One way to select a + name that has a high chance of yielding reliable detection + is to include one or more unicode characters that are highly + improbable to exist in your data. For example, assuming your + data is entered from a regular ASCII keyboard, the symbol + with the hex code point 29DD might be used like so: + ⧝MY_TOKEN_TYPE. + context (google.cloud.dlp_v2.types.FieldId): + A context may be used for higher security and maintaining + referential integrity such that the same identifier in two + different contexts will be given a distinct surrogate. The + context is appended to plaintext value being encrypted. On + decryption the provided context is validated against the + value used during encryption. 
If a context was provided + during encryption, same context must be provided during + decryption as well. + + If the context is not set, plaintext would be used as is for + encryption. If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + plaintext would be used as is for encryption. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + unstructured ``ContentItem``\ s. + """ + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + surrogate_info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + + +class ReplaceValueConfig(proto.Message): + r"""Replace each input value with a given ``Value``. + + Attributes: + new_value (google.cloud.dlp_v2.types.Value): + Value to replace it with. + """ + + new_value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + + +class ReplaceDictionaryConfig(proto.Message): + r"""Replace each input value with a value randomly selected from + the dictionary. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): + A list of words to select from for random replacement. The + `limits `__ page + contains details about the size limits of dictionaries. + + This field is a member of `oneof`_ ``type``. + """ + + word_list: storage.CustomInfoType.Dictionary.WordList = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.Dictionary.WordList, + ) + + +class ReplaceWithInfoTypeConfig(proto.Message): + r"""Replace each matching finding with the name of the info_type. 
+ """ + + +class RedactConfig(proto.Message): + r"""Redact a given value. For example, if used with an + ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My + phone number is 206-555-0123', the output would be 'My phone number + is '. + + """ + + +class CharsToIgnore(proto.Message): + r"""Characters to skip when doing deidentification of a value. + These will be left alone and skipped. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + characters_to_skip (str): + Characters to not transform when masking. + + This field is a member of `oneof`_ ``characters``. + common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): + Common characters to not transform when + masking. Useful to avoid removing punctuation. + + This field is a member of `oneof`_ ``characters``. + """ + class CommonCharsToIgnore(proto.Enum): + r"""Convenience enum for indicating common characters to not + transform. + + Values: + COMMON_CHARS_TO_IGNORE_UNSPECIFIED (0): + Unused. 
+ NUMERIC (1): + 0-9 + ALPHA_UPPER_CASE (2): + A-Z + ALPHA_LOWER_CASE (3): + a-z + PUNCTUATION (4): + US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ + WHITESPACE (5): + Whitespace character, one of [ \\t\n\x0B\f\r] + """ + COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 + NUMERIC = 1 + ALPHA_UPPER_CASE = 2 + ALPHA_LOWER_CASE = 3 + PUNCTUATION = 4 + WHITESPACE = 5 + + characters_to_skip: str = proto.Field( + proto.STRING, + number=1, + oneof='characters', + ) + common_characters_to_ignore: CommonCharsToIgnore = proto.Field( + proto.ENUM, + number=2, + oneof='characters', + enum=CommonCharsToIgnore, + ) + + +class CharacterMaskConfig(proto.Message): + r"""Partially mask a string by replacing a given number of characters + with a fixed character. Masking can start from the beginning or end + of the string. This can be used on data of any type (numbers, longs, + and so on) and when de-identifying structured data we'll attempt to + preserve the original data's type. (This allows you to take a long + like 123 and modify it to a string like \**3. + + Attributes: + masking_character (str): + Character to use to mask the sensitive values—for example, + ``*`` for an alphabetic string such as a name, or ``0`` for + a numeric string such as ZIP code or credit card number. + This string must have a length of 1. If not supplied, this + value defaults to ``*`` for strings, and ``0`` for digits. + number_to_mask (int): + Number of characters to mask. If not set, all matching chars + will be masked. Skipped characters do not count towards this + tally. + + If ``number_to_mask`` is negative, this denotes inverse + masking. Cloud DLP masks all but a number of characters. For + example, suppose you have the following values: + + - ``masking_character`` is ``*`` + - ``number_to_mask`` is ``-4`` + - ``reverse_order`` is ``false`` + - ``CharsToIgnore`` includes ``-`` + - Input string is ``1234-5678-9012-3456`` + + The resulting de-identified string is + ``****-****-****-3456``. 
Cloud DLP masks all but the last + four characters. If ``reverse_order`` is ``true``, all but + the first four characters are masked as + ``1234-****-****-****``. + reverse_order (bool): + Mask characters in reverse order. For example, if + ``masking_character`` is ``0``, ``number_to_mask`` is + ``14``, and ``reverse_order`` is ``false``, then the input + string ``1234-5678-9012-3456`` is masked as + ``00000000000000-3456``. If ``masking_character`` is ``*``, + ``number_to_mask`` is ``3``, and ``reverse_order`` is + ``true``, then the string ``12345`` is masked as ``12***``. + characters_to_ignore (MutableSequence[google.cloud.dlp_v2.types.CharsToIgnore]): + When masking a string, items in this list will be skipped + when replacing characters. For example, if the input string + is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` + and mask 5 characters with ``*``, Cloud DLP returns + ``***-**5-5555``. + """ + + masking_character: str = proto.Field( + proto.STRING, + number=1, + ) + number_to_mask: int = proto.Field( + proto.INT32, + number=2, + ) + reverse_order: bool = proto.Field( + proto.BOOL, + number=3, + ) + characters_to_ignore: MutableSequence['CharsToIgnore'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='CharsToIgnore', + ) + + +class FixedSizeBucketingConfig(proto.Message): + r"""Buckets values based on fixed size ranges. The Bucketing + transformation can provide all of this functionality, but requires + more configuration. This message is provided as a convenience to the + user for simple bucketing strategies. + + The transformed value will be a hyphenated string of + {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and + upper_bound = 20, all values that are within this bucket will be + replaced with "10-20". + + This can be used on data of type: double, long. 
+ + If the bound Value type differs from the type of data being + transformed, we will first attempt converting the type of the data + to be transformed to match the type of the bound before comparing. + + See https://cloud.google.com/dlp/docs/concepts-bucketing to learn + more. + + Attributes: + lower_bound (google.cloud.dlp_v2.types.Value): + Required. Lower bound value of buckets. All values less than + ``lower_bound`` are grouped together into a single bucket; + for example if ``lower_bound`` = 10, then all values less + than 10 are replaced with the value "-10". + upper_bound (google.cloud.dlp_v2.types.Value): + Required. Upper bound value of buckets. All values greater + than upper_bound are grouped together into a single bucket; + for example if ``upper_bound`` = 89, then all values greater + than 89 are replaced with the value "89+". + bucket_size (float): + Required. Size of each bucket (except for minimum and + maximum buckets). So if ``lower_bound`` = 10, + ``upper_bound`` = 89, and ``bucket_size`` = 10, then the + following buckets would be used: -10, 10-20, 20-30, 30-40, + 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 + decimals works. + """ + + lower_bound: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + upper_bound: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + bucket_size: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + +class BucketingConfig(proto.Message): + r"""Generalization function that buckets values based on ranges. The + ranges and replacement values are dynamically provided by the user + for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> + HIGH This can be used on data of type: number, long, string, + timestamp. If the bound ``Value`` type differs from the type of data + being transformed, we will first attempt converting the type of the + data to be transformed to match the type of the bound before + comparing. 
See https://cloud.google.com/dlp/docs/concepts-bucketing + to learn more. + + Attributes: + buckets (MutableSequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): + Set of buckets. Ranges must be + non-overlapping. + """ + + class Bucket(proto.Message): + r"""Bucket is represented as a range, along with replacement + values. + + Attributes: + min_ (google.cloud.dlp_v2.types.Value): + Lower bound of the range, inclusive. Type + should be the same as max if used. + max_ (google.cloud.dlp_v2.types.Value): + Upper bound of the range, exclusive; type + must match min. + replacement_value (google.cloud.dlp_v2.types.Value): + Required. Replacement value for this bucket. + """ + + min_: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + max_: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + replacement_value: 'Value' = proto.Field( + proto.MESSAGE, + number=3, + message='Value', + ) + + buckets: MutableSequence[Bucket] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Bucket, + ) + + +class CryptoReplaceFfxFpeConfig(proto.Message): + r"""Replaces an identifier with a surrogate using Format Preserving + Encryption (FPE) with the FFX mode of operation; however when used + in the ``ReidentifyContent`` API method, it serves the opposite + function by reversing the surrogate back into the original + identifier. The identifier must be encoded as ASCII. For a given + crypto key and context, the same identifier will be replaced with + the same surrogate. Identifiers must be at least two characters + long. In the case that the identifier is the empty string, it will + be skipped. See https://cloud.google.com/dlp/docs/pseudonymization + to learn more. + + Note: We recommend using CryptoDeterministicConfig for all use cases + which do not require preserving the input alphabet space and size, + plus warrant referential integrity. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Required. The key used by the encryption + algorithm. + context (google.cloud.dlp_v2.types.FieldId): + The 'tweak', a context may be used for higher security since + the same identifier in two different contexts won't be given + the same surrogate. If the context is not set, a default + tweak will be used. + + If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + a default tweak will be used. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + unstructured ``ContentItem``\ s. Currently, the referenced + field may be of value type integer or string. + + The tweak is constructed as a sequence of bytes in big + endian byte order such that: + + - a 64 bit integer is encoded followed by a single byte of + value 1 + - a string is encoded in UTF-8 format followed by a single + byte of value 2 + common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): + Common alphabets. + + This field is a member of `oneof`_ ``alphabet``. + custom_alphabet (str): + This is supported by mapping these to the alphanumeric + characters that the FFX mode natively supports. This happens + before/after encryption/decryption. Each character listed + must appear only once. Number of characters must be in the + range [2, 95]. This must be encoded as ASCII. The order of + characters does not matter. 
The full list of allowed + characters is: + 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz + ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ + + This field is a member of `oneof`_ ``alphabet``. + radix (int): + The native way to select the alphabet. Must be in the range + [2, 95]. + + This field is a member of `oneof`_ ``alphabet``. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom infoType to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom infoType followed by the number + of characters comprising the surrogate. The following scheme + defines the format: + info_type_name(surrogate_character_count):surrogate + + For example, if the name of custom infoType is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom infoType + ```SurrogateType`` `__. + This facilitates reversal of the surrogate when it occurs in + free text. + + In order for inspection to work properly, the name of this + infoType must not occur naturally anywhere in your data; + otherwise, inspection may find a surrogate that does not + correspond to an actual identifier. Therefore, choose your + custom infoType name carefully after considering what your + data looks like. One way to select a name that has a high + chance of yielding reliable detection is to include one or + more unicode characters that are highly improbable to exist + in your data. For example, assuming your data is entered + from a regular ASCII keyboard, the symbol with the hex code + point 29DD might be used like so: ⧝MY_TOKEN_TYPE + """ + class FfxCommonNativeAlphabet(proto.Enum): + r"""These are commonly used subsets of the alphabet that the FFX + mode natively supports. In the algorithm, the alphabet is + selected using the "radix". 
Therefore each corresponds to a + particular radix. + + Values: + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (0): + Unused. + NUMERIC (1): + ``[0-9]`` (radix of 10) + HEXADECIMAL (2): + ``[0-9A-F]`` (radix of 16) + UPPER_CASE_ALPHA_NUMERIC (3): + ``[0-9A-Z]`` (radix of 36) + ALPHA_NUMERIC (4): + ``[0-9A-Za-z]`` (radix of 62) + """ + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 + NUMERIC = 1 + HEXADECIMAL = 2 + UPPER_CASE_ALPHA_NUMERIC = 3 + ALPHA_NUMERIC = 4 + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + common_alphabet: FfxCommonNativeAlphabet = proto.Field( + proto.ENUM, + number=4, + oneof='alphabet', + enum=FfxCommonNativeAlphabet, + ) + custom_alphabet: str = proto.Field( + proto.STRING, + number=5, + oneof='alphabet', + ) + radix: int = proto.Field( + proto.INT32, + number=6, + oneof='alphabet', + ) + surrogate_info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=8, + message=storage.InfoType, + ) + + +class CryptoKey(proto.Message): + r"""This is a data encryption key (DEK) (as opposed to + a key encryption key (KEK) stored by Cloud Key Management + Service (Cloud KMS). + When using Cloud KMS to wrap or unwrap a DEK, be sure to set an + appropriate IAM policy on the KEK to ensure an attacker cannot + unwrap the DEK. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transient (google.cloud.dlp_v2.types.TransientCryptoKey): + Transient crypto key + + This field is a member of `oneof`_ ``source``. 
+ unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): + Unwrapped crypto key + + This field is a member of `oneof`_ ``source``. + kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): + Key wrapped using Cloud KMS + + This field is a member of `oneof`_ ``source``. + """ + + transient: 'TransientCryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + oneof='source', + message='TransientCryptoKey', + ) + unwrapped: 'UnwrappedCryptoKey' = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message='UnwrappedCryptoKey', + ) + kms_wrapped: 'KmsWrappedCryptoKey' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='KmsWrappedCryptoKey', + ) + + +class TransientCryptoKey(proto.Message): + r"""Use this to have a random data crypto key generated. + It will be discarded after the request finishes. + + Attributes: + name (str): + Required. Name of the key. This is an arbitrary string used + to differentiate different keys. A unique key is generated + per name: two separate ``TransientCryptoKey`` protos share + the same generated key if their names are the same. When the + data crypto key is generated, this name is not used in any + way (repeating the api call will result in a different key + being generated). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UnwrappedCryptoKey(proto.Message): + r"""Using raw keys is prone to security risks due to accidentally + leaking the key. Choose another type of key if possible. + + Attributes: + key (bytes): + Required. A 128/192/256 bit key. + """ + + key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class KmsWrappedCryptoKey(proto.Message): + r"""Include to use an existing data crypto key wrapped by KMS. The + wrapped key must be a 128-, 192-, or 256-bit key. 
Authorization + requires the following IAM permissions when sending a request to + perform a crypto transformation using a KMS-wrapped crypto key: + dlp.kms.encrypt + + For more information, see [Creating a wrapped key] + (https://cloud.google.com/dlp/docs/create-wrapped-key). + + Note: When you use Cloud KMS for cryptographic operations, `charges + apply `__. + + Attributes: + wrapped_key (bytes): + Required. The wrapped data crypto key. + crypto_key_name (str): + Required. The resource name of the KMS + CryptoKey to use for unwrapping. + """ + + wrapped_key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DateShiftConfig(proto.Message): + r"""Shifts dates by random number of days, with option to be + consistent for the same context. See + https://cloud.google.com/dlp/docs/concepts-date-shifting to + learn more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + upper_bound_days (int): + Required. Range of shift in days. Actual + shift will be selected at random within this + range (inclusive ends). Negative means shift to + earlier in time. Must not be more than 365250 + days (1000 years) each direction. + For example, 3 means shift date to at most 3 + days into the future. + lower_bound_days (int): + Required. For example, -5 means shift date to + at most 5 days back in the past. + context (google.cloud.dlp_v2.types.FieldId): + Points to the field that contains the + context, for example, an entity id. If set, must + also set cryptoKey. If set, shift will be + consistent for the given context. + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Causes the shift to be computed based on this key and the + context. This results in the same shift for the same context + and crypto_key. If set, must also set context. Can only be + applied to table items. + + This field is a member of `oneof`_ ``method``. 
+ """ + + upper_bound_days: int = proto.Field( + proto.INT32, + number=1, + ) + lower_bound_days: int = proto.Field( + proto.INT32, + number=2, + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=4, + oneof='method', + message='CryptoKey', + ) + + +class InfoTypeTransformations(proto.Message): + r"""A type of transformation that will scan unstructured text and apply + various ``PrimitiveTransformation``\ s to each finding, where the + transformation is applied to only values that were identified as a + specific info_type. + + Attributes: + transformations (MutableSequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): + Required. Transformation for each infoType. + Cannot specify more than one for a given + infoType. + """ + + class InfoTypeTransformation(proto.Message): + r"""A transformation to apply to text that is identified as a specific + info_type. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoTypes to apply the transformation to. An empty list will + cause this transformation to apply to all findings that + correspond to infoTypes that were requested in + ``InspectConfig``. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Required. Primitive transformation to apply + to the infoType. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + primitive_transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=2, + message='PrimitiveTransformation', + ) + + transformations: MutableSequence[InfoTypeTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=InfoTypeTransformation, + ) + + +class FieldTransformation(proto.Message): + r"""The transformation to apply to the field. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Required. Input field(s) to apply the transformation to. + When you have columns that reference their position within a + list, omit the index from the FieldId. FieldId name matching + ignores the index. For example, instead of + "contact.nums[0].type", use "contact.nums.type". + condition (google.cloud.dlp_v2.types.RecordCondition): + Only apply the transformation if the condition evaluates to + true for the given ``RecordCondition``. The conditions are + allowed to reference fields that are not used in the actual + transformation. + + Example Use Cases: + + - Apply a different bucket transformation to an age column + if the zip code column for the same record is within a + specific range. + - Redact a field if the date of birth field is greater than + 85. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Apply the transformation to the entire field. + + This field is a member of `oneof`_ ``transformation``. + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the contents of the field as free text, and + selectively transform content that matches an ``InfoType``. + + This field is a member of `oneof`_ ``transformation``. 
+ """ + + fields: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + condition: 'RecordCondition' = proto.Field( + proto.MESSAGE, + number=3, + message='RecordCondition', + ) + primitive_transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='PrimitiveTransformation', + ) + info_type_transformations: 'InfoTypeTransformations' = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='InfoTypeTransformations', + ) + + +class RecordTransformations(proto.Message): + r"""A type of transformation that is applied over structured data + such as a table. + + Attributes: + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + Transform the record by applying various + field transformations. + record_suppressions (MutableSequence[google.cloud.dlp_v2.types.RecordSuppression]): + Configuration defining which records get + suppressed entirely. Records that match any + suppression rule are omitted from the output. + """ + + field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FieldTransformation', + ) + record_suppressions: MutableSequence['RecordSuppression'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='RecordSuppression', + ) + + +class RecordSuppression(proto.Message): + r"""Configuration to suppress records whose suppression + conditions evaluate to true. + + Attributes: + condition (google.cloud.dlp_v2.types.RecordCondition): + A condition that when it evaluates to true + will result in the record being evaluated to be + suppressed from the transformed content. 
+ """ + + condition: 'RecordCondition' = proto.Field( + proto.MESSAGE, + number=1, + message='RecordCondition', + ) + + +class RecordCondition(proto.Message): + r"""A condition for determining whether a transformation should + be applied to a field. + + Attributes: + expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): + An expression. + """ + + class Condition(proto.Message): + r"""The field type of ``value`` and ``field`` do not need to match to be + considered equal, but not all comparisons are possible. EQUAL_TO and + NOT_EQUAL_TO attempt to compare even with incompatible types, but + all other comparisons are invalid with incompatible types. A + ``value`` of type: + + - ``string`` can be compared against all other types + - ``boolean`` can only be compared against other booleans + - ``integer`` can be compared against doubles or a string if the + string value can be parsed as an integer. + - ``double`` can be compared against integers or a string if the + string can be parsed as a double. + - ``Timestamp`` can be compared against strings in RFC 3339 date + string format. + - ``TimeOfDay`` can be compared against timestamps and strings in + the format of 'HH:mm:ss'. + + If we fail to compare do to type mismatch, a warning will be given + and the condition will evaluate to false. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Field within the record this + condition is evaluated against. + operator (google.cloud.dlp_v2.types.RelationalOperator): + Required. Operator used to compare the field + or infoType to the value. + value (google.cloud.dlp_v2.types.Value): + Value to compare against. [Mandatory, except for ``EXISTS`` + tests.] 
+ """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + operator: 'RelationalOperator' = proto.Field( + proto.ENUM, + number=3, + enum='RelationalOperator', + ) + value: 'Value' = proto.Field( + proto.MESSAGE, + number=4, + message='Value', + ) + + class Conditions(proto.Message): + r"""A collection of conditions. + + Attributes: + conditions (MutableSequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): + A collection of conditions. + """ + + conditions: MutableSequence['RecordCondition.Condition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='RecordCondition.Condition', + ) + + class Expressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): + The operator to apply to the result of conditions. Default + and currently only supported value is ``AND``. + conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): + Conditions to apply to the expression. + + This field is a member of `oneof`_ ``type``. + """ + class LogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused + AND (1): + Conditional AND + """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + AND = 1 + + logical_operator: 'RecordCondition.Expressions.LogicalOperator' = proto.Field( + proto.ENUM, + number=1, + enum='RecordCondition.Expressions.LogicalOperator', + ) + conditions: 'RecordCondition.Conditions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='RecordCondition.Conditions', + ) + + expressions: Expressions = proto.Field( + proto.MESSAGE, + number=3, + message=Expressions, + ) + + +class TransformationOverview(proto.Message): + r"""Overview of the modifications that occurred. 
+ + Attributes: + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + transformation_summaries (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary]): + Transformations applied to the dataset. + """ + + transformed_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + transformation_summaries: MutableSequence['TransformationSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='TransformationSummary', + ) + + +class TransformationSummary(proto.Message): + r"""Summary of a single transformation. Only one of 'transformation', + 'field_transformation', or 'record_suppress' will be set. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a + specific InfoType. + field (google.cloud.dlp_v2.types.FieldId): + Set if the transformation was limited to a + specific FieldId. + transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + The specific transformation these stats apply + to. + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + The field transformation that was applied. + If multiple field transformations are requested + for a single field, this list will contain all + of them; otherwise, only one is supplied. + record_suppress (google.cloud.dlp_v2.types.RecordSuppression): + The specific suppression option these stats + apply to. + results (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): + Collection of all transformations that took + place or had an error. + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + """ + class TransformationResultCode(proto.Enum): + r"""Possible outcomes of transformations. + + Values: + TRANSFORMATION_RESULT_CODE_UNSPECIFIED (0): + Unused + SUCCESS (1): + Transformation completed without an error. + ERROR (2): + Transformation had an error. 
+ """ + TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 + SUCCESS = 1 + ERROR = 2 + + class SummaryResult(proto.Message): + r"""A collection that informs the user the number of times a particular + ``TransformationResultCode`` and error details occurred. + + Attributes: + count (int): + Number of transformations counted by this + result. + code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): + Outcome of the transformation. + details (str): + A place for warnings or errors to show up if + a transformation didn't work as expected. + """ + + count: int = proto.Field( + proto.INT64, + number=1, + ) + code: 'TransformationSummary.TransformationResultCode' = proto.Field( + proto.ENUM, + number=2, + enum='TransformationSummary.TransformationResultCode', + ) + details: str = proto.Field( + proto.STRING, + number=3, + ) + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=3, + message='PrimitiveTransformation', + ) + field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='FieldTransformation', + ) + record_suppress: 'RecordSuppression' = proto.Field( + proto.MESSAGE, + number=6, + message='RecordSuppression', + ) + results: MutableSequence[SummaryResult] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=SummaryResult, + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=7, + ) + + +class TransformationDescription(proto.Message): + r"""A flattened description of a ``PrimitiveTransformation`` or + ``RecordSuppression``. + + Attributes: + type_ (google.cloud.dlp_v2.types.TransformationType): + The transformation type. + description (str): + A description of the transformation. 
This is empty for a + RECORD_SUPPRESSION, or is the output of calling toString() + on the ``PrimitiveTransformation`` protocol buffer message + for any other type of transformation. + condition (str): + A human-readable string representation of the + ``RecordCondition`` corresponding to this transformation. + Set if a ``RecordCondition`` was used to determine whether + or not to apply this transformation. + + Examples: \* (age_field > 85) \* (age_field <= 18) \* + (zip_field exists) \* (zip_field == 01234) && (city_field != + "Springville") \* (zip_field == 01234) && (age_field <= 18) + && (city_field exists) + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a specific + ``InfoType``. + """ + + type_: 'TransformationType' = proto.Field( + proto.ENUM, + number=1, + enum='TransformationType', + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + condition: str = proto.Field( + proto.STRING, + number=3, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=4, + message=storage.InfoType, + ) + + +class TransformationDetails(proto.Message): + r"""Details about a single transformation. This object contains a + description of the transformation, information about whether the + transformation was successfully applied, and the precise + location where the transformation occurred. These details are + stored in a user-specified BigQuery table. + + Attributes: + resource_name (str): + The name of the job that completed the + transformation. + container_name (str): + The top level name of the container where the + transformation is located (this will be the + source file name or table name). + transformation (MutableSequence[google.cloud.dlp_v2.types.TransformationDescription]): + Description of transformation. This would only contain more + than one element if there were multiple matching + transformations and which one to apply was ambiguous. 
Not + set for states that contain no transformation, currently + only state that contains no transformation is + TransformationResultStateType.METADATA_UNRETRIEVABLE. + status_details (google.cloud.dlp_v2.types.TransformationResultStatus): + Status of the transformation, if + transformation was not successful, this will + specify what caused it to fail, otherwise it + will show that the transformation was + successful. + transformed_bytes (int): + The number of bytes that were transformed. If + transformation was unsuccessful or did not take + place because there was no content to transform, + this will be zero. + transformation_location (google.cloud.dlp_v2.types.TransformationLocation): + The precise location of the transformed + content in the original container. + """ + + resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + container_name: str = proto.Field( + proto.STRING, + number=2, + ) + transformation: MutableSequence['TransformationDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='TransformationDescription', + ) + status_details: 'TransformationResultStatus' = proto.Field( + proto.MESSAGE, + number=4, + message='TransformationResultStatus', + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + transformation_location: 'TransformationLocation' = proto.Field( + proto.MESSAGE, + number=6, + message='TransformationLocation', + ) + + +class TransformationLocation(proto.Message): + r"""Specifies the location of a transformation. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + finding_id (str): + For infotype transformations, link to the + corresponding findings ID so that location + information does not need to be duplicated. Each + findings ID correlates to an entry in the + findings output table, this table only gets + created when users specify to save findings (add + the save findings action to the request). + + This field is a member of `oneof`_ ``location_type``. + record_transformation (google.cloud.dlp_v2.types.RecordTransformation): + For record transformations, provide a field + and container information. + + This field is a member of `oneof`_ ``location_type``. + container_type (google.cloud.dlp_v2.types.TransformationContainerType): + Information about the functionality of the + container where this finding occurred, if + available. + """ + + finding_id: str = proto.Field( + proto.STRING, + number=1, + oneof='location_type', + ) + record_transformation: 'RecordTransformation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location_type', + message='RecordTransformation', + ) + container_type: 'TransformationContainerType' = proto.Field( + proto.ENUM, + number=3, + enum='TransformationContainerType', + ) + + +class RecordTransformation(proto.Message): + r""" + + Attributes: + field_id (google.cloud.dlp_v2.types.FieldId): + For record transformations, provide a field. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if + applicable. + container_version (str): + Container version, if available ("generation" + for Cloud Storage). 
+ """ + + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TransformationResultStatus(proto.Message): + r""" + + Attributes: + result_status_type (google.cloud.dlp_v2.types.TransformationResultStatusType): + Transformation result status type, this will + be either SUCCESS, or it will be the reason for + why the transformation was not completely + successful. + details (google.rpc.status_pb2.Status): + Detailed error codes and messages + """ + + result_status_type: 'TransformationResultStatusType' = proto.Field( + proto.ENUM, + number=1, + enum='TransformationResultStatusType', + ) + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class TransformationDetailsStorageConfig(proto.Message): + r"""Config for storing transformation details. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + The BigQuery table in which to store the output. This may be + an existing table or in a new table in an existing dataset. + If table_id is not set a new one will be generated for you + with the following format: + dlp_googleapis_transformation_details_yyyy_mm_dd_[dlp_job_id]. + Pacific time zone will be used for generating the date + details. + + This field is a member of `oneof`_ ``type``. + """ + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.BigQueryTable, + ) + + +class Schedule(proto.Message): + r"""Schedule for inspect job triggers. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + recurrence_period_duration (google.protobuf.duration_pb2.Duration): + With this option a job is started on a + regular periodic basis. For example: every day + (86400 seconds). + A scheduled start time will be skipped if the + previous execution has not ended when its + scheduled time occurs. + This value must be set to a time duration + greater than or equal to 1 day and can be no + longer than 60 days. + + This field is a member of `oneof`_ ``option``. + """ + + recurrence_period_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + oneof='option', + message=duration_pb2.Duration, + ) + + +class Manual(proto.Message): + r"""Job trigger option for hybrid jobs. Jobs must be manually + created and finished. + + """ + + +class InspectTemplate(proto.Message): + r"""The inspectTemplate contains a configuration (set of types of + sensitive data to be detected) to be used anywhere you otherwise + would normally specify InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates to learn + more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + The core content of the template. + Configuration of the scanning process. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='InspectConfig', + ) + + +class DeidentifyTemplate(proto.Message): + r"""DeidentifyTemplates contains instructions on how to + de-identify content. See + https://cloud.google.com/dlp/docs/concepts-templates to learn + more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + The core content of the template. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + deidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='DeidentifyConfig', + ) + + +class Error(proto.Message): + r"""Details information about an error encountered during job + execution or the results of an unsuccessful activation of the + JobTrigger. + + Attributes: + details (google.rpc.status_pb2.Status): + Detailed error codes and messages. + timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): + The times the error occurred. + """ + + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class JobTrigger(proto.Message): + r"""Contains a configuration to make dlp api calls on a repeating + basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers to learn + more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Unique resource name for the triggeredJob, assigned by the + service when the triggeredJob is created, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + display_name (str): + Display name (max 100 chars) + description (str): + User provided description (max 256 chars) + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + For inspect jobs, a snapshot of the + configuration. + + This field is a member of `oneof`_ ``job``. 
+ triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): + A list of triggers which will be OR'ed + together. Only one in the list needs to trigger + for a job to be started. The list may contain + only a single Schedule trigger and must have at + least one object. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Output only. A stream of errors encountered + when the trigger was activated. Repeated errors + may result in the JobTrigger automatically being + paused. Will return the last 100 errors. + Whenever the JobTrigger is modified this list + will be cleared. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of a + triggeredJob. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of a + triggeredJob. + last_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp of the last time + this trigger executed. + status (google.cloud.dlp_v2.types.JobTrigger.Status): + Required. A status for this trigger. + """ + class Status(proto.Enum): + r"""Whether the trigger is currently active. If PAUSED or + CANCELLED, no jobs will be created with this configuration. The + service may automatically pause triggers experiencing frequent + errors. To restart a job, set the status to HEALTHY after + correcting user errors. + + Values: + STATUS_UNSPECIFIED (0): + Unused. + HEALTHY (1): + Trigger is healthy. + PAUSED (2): + Trigger is temporarily paused. + CANCELLED (3): + Trigger is cancelled and can not be resumed. + """ + STATUS_UNSPECIFIED = 0 + HEALTHY = 1 + PAUSED = 2 + CANCELLED = 3 + + class Trigger(proto.Message): + r"""What event needs to occur for a new job to be started. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + schedule (google.cloud.dlp_v2.types.Schedule): + Create a job on a repeating basis based on + the elapse of time. + + This field is a member of `oneof`_ ``trigger``. + manual (google.cloud.dlp_v2.types.Manual): + For use with hybrid jobs. Jobs must be + manually created and finished. + + This field is a member of `oneof`_ ``trigger``. + """ + + schedule: 'Schedule' = proto.Field( + proto.MESSAGE, + number=1, + oneof='trigger', + message='Schedule', + ) + manual: 'Manual' = proto.Field( + proto.MESSAGE, + number=2, + oneof='trigger', + message='Manual', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + inspect_job: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=4, + oneof='job', + message='InspectJobConfig', + ) + triggers: MutableSequence[Trigger] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=Trigger, + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='Error', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + last_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + status: Status = proto.Field( + proto.ENUM, + number=10, + enum=Status, + ) + + +class Action(proto.Message): + r"""A task to execute on the completion of a job. + See https://cloud.google.com/dlp/docs/concepts-actions to learn + more. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): + Save resulting findings in a provided + location. + + This field is a member of `oneof`_ ``action``. + pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub): + Publish a notification to a Pub/Sub topic. + + This field is a member of `oneof`_ ``action``. + publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc): + Publish summary to Cloud Security Command + Center (Alpha). + + This field is a member of `oneof`_ ``action``. + publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog): + Publish findings to Cloud Datahub. + + This field is a member of `oneof`_ ``action``. + deidentify (google.cloud.dlp_v2.types.Action.Deidentify): + Create a de-identified copy of the input + data. + + This field is a member of `oneof`_ ``action``. + job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails): + Sends an email when the job completes. The email goes to IAM + project owners and technical `Essential + Contacts `__. + + This field is a member of `oneof`_ ``action``. + publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver): + Enable Stackdriver metric dlp.googleapis.com/finding_count. + + This field is a member of `oneof`_ ``action``. + """ + + class SaveFindings(proto.Message): + r"""If set, the detailed findings will be persisted to the + specified OutputStorageConfig. Only a single instance of this + action can be specified. + Compatible with: Inspect, Risk + + Attributes: + output_config (google.cloud.dlp_v2.types.OutputStorageConfig): + Location to store findings outside of DLP. 
+ """ + + output_config: 'OutputStorageConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='OutputStorageConfig', + ) + + class PublishToPubSub(proto.Message): + r"""Publish a message into a given Pub/Sub topic when DlpJob has + completed. The message contains a single field, ``DlpJobName``, + which is equal to the finished job's + ```DlpJob.name`` `__. + Compatible with: Inspect, Risk + + Attributes: + topic (str): + Cloud Pub/Sub topic to send notifications to. + The topic must have given publishing access + rights to the DLP API service account executing + the long running DlpJob sending the + notifications. Format is + projects/{project}/topics/{topic}. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + + class PublishSummaryToCscc(proto.Message): + r"""Publish the result summary of a DlpJob to the Cloud Security + Command Center (CSCC Alpha). + This action is only available for projects which are parts of an + organization and whitelisted for the alpha Cloud Security + Command Center. + The action will publish the count of finding instances and their + info types. The summary of findings will be persisted in CSCC + and are governed by CSCC service-specific policy, see + https://cloud.google.com/terms/service-terms Only a single + instance of this action can be specified. Compatible with: + Inspect + + """ + + class PublishFindingsToCloudDataCatalog(proto.Message): + r"""Publish findings of a DlpJob to Data Catalog. In Data Catalog, tag + templates are applied to the resource that Cloud DLP scanned. Data + Catalog tag templates are stored in the same project and region + where the BigQuery table exists. For Cloud DLP to create and apply + the tag template, the Cloud DLP service agent must have the + ``roles/datacatalog.tagTemplateOwner`` permission on the project. + The tag template contains fields summarizing the results of the + DlpJob. Any field values previously written by another DlpJob are + deleted. 
[InfoType naming patterns][google.privacy.dlp.v2.InfoType] + are strictly enforced when using this feature. + + Findings are persisted in Data Catalog storage and are governed by + service-specific policies for Data Catalog. For more information, + see `Service Specific + Terms `__. + + Only a single instance of this action can be specified. This action + is allowed only if all resources being scanned are BigQuery tables. + Compatible with: Inspect + + """ + + class Deidentify(proto.Message): + r"""Create a de-identified copy of the requested table or files. + + A TransformationDetail will be created for each transformation. + + If any rows in BigQuery are skipped during de-identification + (transformation errors or row size exceeds BigQuery insert API + limits) they are placed in the failure output table. If the original + row exceeds the BigQuery insert API limit it will be truncated when + written to the failure output table. The failure output table can be + set in the + action.deidentify.output.big_query_output.deidentified_failure_output_table + field, if no table is set, a table will be automatically created in + the same project and dataset as the original table. + + Compatible with: Inspect + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transformation_config (google.cloud.dlp_v2.types.TransformationConfig): + User specified deidentify templates and + configs for structured, unstructured, and image + files. + transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): + Config for storing transformation details. This is separate + from the de-identified content, and contains metadata about + the successful transformations and/or failures that occurred + while de-identifying. 
This needs to be set in order for + users to access information about the status of each + transformation (see + [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] + message for more information about what is noted). + cloud_storage_output (str): + Required. User settable Cloud Storage bucket + and folders to store de-identified files. This + field must be set for cloud storage + deidentification. The output Cloud Storage + bucket must be different from the input bucket. + De-identified files will overwrite files in the + output path. + Form of: gs://bucket/folder/ or gs://bucket + + This field is a member of `oneof`_ ``output``. + file_types_to_transform (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of user-specified file type groups to transform. If + specified, only the files with these filetypes will be + transformed. If empty, all supported files will be + transformed. Supported types may be automatically added over + time. If a file type is set in this field that isn't + supported by the Deidentify action then the job will fail + and will not be successfully created/started. Currently the + only filetypes supported are: IMAGES, TEXT_FILES, CSV, TSV. + """ + + transformation_config: 'TransformationConfig' = proto.Field( + proto.MESSAGE, + number=7, + message='TransformationConfig', + ) + transformation_details_storage_config: 'TransformationDetailsStorageConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='TransformationDetailsStorageConfig', + ) + cloud_storage_output: str = proto.Field( + proto.STRING, + number=9, + oneof='output', + ) + file_types_to_transform: MutableSequence[storage.FileType] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=storage.FileType, + ) + + class JobNotificationEmails(proto.Message): + r"""Sends an email when the job completes. The email goes to IAM project + owners and technical `Essential + Contacts `__. 
+ + """ + + class PublishToStackdriver(proto.Message): + r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This + will publish a metric to stack driver on each infotype requested and + how many findings were found for it. CustomDetectors will be + bucketed as 'Custom' under the Stackdriver label 'info_type'. + + """ + + save_findings: SaveFindings = proto.Field( + proto.MESSAGE, + number=1, + oneof='action', + message=SaveFindings, + ) + pub_sub: PublishToPubSub = proto.Field( + proto.MESSAGE, + number=2, + oneof='action', + message=PublishToPubSub, + ) + publish_summary_to_cscc: PublishSummaryToCscc = proto.Field( + proto.MESSAGE, + number=3, + oneof='action', + message=PublishSummaryToCscc, + ) + publish_findings_to_cloud_data_catalog: PublishFindingsToCloudDataCatalog = proto.Field( + proto.MESSAGE, + number=5, + oneof='action', + message=PublishFindingsToCloudDataCatalog, + ) + deidentify: Deidentify = proto.Field( + proto.MESSAGE, + number=7, + oneof='action', + message=Deidentify, + ) + job_notification_emails: JobNotificationEmails = proto.Field( + proto.MESSAGE, + number=8, + oneof='action', + message=JobNotificationEmails, + ) + publish_to_stackdriver: PublishToStackdriver = proto.Field( + proto.MESSAGE, + number=9, + oneof='action', + message=PublishToStackdriver, + ) + + +class TransformationConfig(proto.Message): + r"""User specified templates and configs for how to deidentify + structured, unstructures, and image files. User must provide + either a unstructured deidentify template or at least one redact + image config. + + Attributes: + deidentify_template (str): + De-identify template. If this template is specified, it will + serve as the default de-identify template. This template + cannot contain ``record_transformations`` since it can be + used for unstructured content such as free-form text files. + If this template is not set, a default + ``ReplaceWithInfoTypeConfig`` will be used to de-identify + unstructured content. 
+ structured_deidentify_template (str): + Structured de-identify template. If this template is + specified, it will serve as the de-identify template for + structured content such as delimited files and tables. If + this template is not set but the ``deidentify_template`` is + set, then ``deidentify_template`` will also apply to the + structured content. If neither template is set, a default + ``ReplaceWithInfoTypeConfig`` will be used to de-identify + structured content. + image_redact_template (str): + Image redact template. + If this template is specified, it will serve as + the de-identify template for images. If this + template is not set, all findings in the image + will be redacted with a black box. + """ + + deidentify_template: str = proto.Field( + proto.STRING, + number=1, + ) + structured_deidentify_template: str = proto.Field( + proto.STRING, + number=2, + ) + image_redact_template: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CreateInspectTemplateRequest(proto.Message): + r"""Request message for CreateInspectTemplate. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + template_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInspectTemplateRequest(proto.Message): + r"""Request message for UpdateInspectTemplate. + + Attributes: + name (str): + Required. Resource name of organization and inspectTemplate + to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetInspectTemplateRequest(proto.Message): + r"""Request message for GetInspectTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListInspectTemplatesRequest(proto.Message): + r"""Request message for ListInspectTemplates. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListInspectTemplates``. + page_size (int): + Size of the page, can be limited by the + server. 
If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the template was + created. + - ``update_time``: corresponds to the time the template was + last updated. + - ``name``: corresponds to the template's name. + - ``display_name``: corresponds to the template's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInspectTemplatesResponse(proto.Message): + r"""Response message for ListInspectTemplates. + + Attributes: + inspect_templates (MutableSequence[google.cloud.dlp_v2.types.InspectTemplate]): + List of inspectTemplates, up to page_size in + ListInspectTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListInspectTemplates request. + """ + + @property + def raw_page(self): + return self + + inspect_templates: MutableSequence['InspectTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteInspectTemplateRequest(proto.Message): + r"""Request message for DeleteInspectTemplate. + + Attributes: + name (str): + Required. 
Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateJobTriggerRequest(proto.Message): + r"""Request message for CreateJobTrigger. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + trigger_id (str): + The trigger id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: 'JobTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + trigger_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ActivateJobTriggerRequest(proto.Message): + r"""Request message for ActivateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the trigger to activate, for + example ``projects/dlp-test-project/jobTriggers/53234423``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateJobTriggerRequest(proto.Message): + r"""Request message for UpdateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: 'JobTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetJobTriggerRequest(proto.Message): + r"""Request message for GetJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDlpJobRequest(proto.Message): + r"""Request message for CreateDlpJobRequest. Used to initiate + long running jobs such as calculating risk metrics or inspecting + Google Cloud Storage. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage repository + for InfoTypes. + + This field is a member of `oneof`_ ``job``. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a BigQuery + table. + + This field is a member of `oneof`_ ``job``. + job_id (str): + The job id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_job: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=2, + oneof='job', + message='InspectJobConfig', + ) + risk_job: 'RiskAnalysisJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='job', + message='RiskAnalysisJobConfig', + ) + job_id: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListJobTriggersRequest(proto.Message): + r"""Request message for ListJobTriggers. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ListJobTriggers. ``order_by`` field must not change for + subsequent calls. + page_size (int): + Size of the page, can be limited by a server. + order_by (str): + Comma separated list of triggeredJob fields to order by, + followed by ``asc`` or ``desc`` postfix. This list is + case-insensitive, default sorting order is ascending, + redundant space characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the JobTrigger + was created. + - ``update_time``: corresponds to the time the JobTrigger + was last updated. + - ``last_run_time``: corresponds to the last time the + JobTrigger ran. + - ``name``: corresponds to the JobTrigger's name. + - ``display_name``: corresponds to the JobTrigger's display + name. + - ``status``: corresponds to JobTrigger's status. + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. 
+ - Supported fields/values for inspect triggers: + + - ``status`` - HEALTHY|PAUSED|CANCELLED + - ``inspected_storage`` - + DATASTORE|CLOUD_STORAGE|BIGQUERY + - 'last_run_time\` - RFC 3339 formatted timestamp, + surrounded by quotation marks. Nanoseconds are + ignored. + - 'error_count' - Number of errors that have occurred + while running. + + - The operator must be ``=`` or ``!=`` for status and + inspected_storage. + + Examples: + + - inspected_storage = cloud_storage AND status = HEALTHY + - inspected_storage = cloud_storage OR inspected_storage = + bigquery + - inspected_storage = cloud_storage AND (state = PAUSED OR + state = HEALTHY) + - last_run_time > "2017-12-12T00:00:00+00:00" + + The length of this field should be no more than 500 + characters. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of jobs. Will use ``DlpJobType.INSPECT`` if not + set. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=6, + enum='DlpJobType', + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListJobTriggersResponse(proto.Message): + r"""Response message for ListJobTriggers. + + Attributes: + job_triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger]): + List of triggeredJobs, up to page_size in + ListJobTriggersRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListJobTriggers request. 
+ """ + + @property + def raw_page(self): + return self + + job_triggers: MutableSequence['JobTrigger'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='JobTrigger', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteJobTriggerRequest(proto.Message): + r"""Request message for DeleteJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InspectJobConfig(proto.Message): + r"""Controls what and how to inspect for findings. + + Attributes: + storage_config (google.cloud.dlp_v2.types.StorageConfig): + The data to scan. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + How and what to scan for. + inspect_template_name (str): + If provided, will be used as the default for all values in + InspectConfig. ``inspect_config`` will be merged into the + values persisted as part of the template. + actions (MutableSequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. + """ + + storage_config: storage.StorageConfig = proto.Field( + proto.MESSAGE, + number=1, + message=storage.StorageConfig, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=3, + ) + actions: MutableSequence['Action'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Action', + ) + + +class DataProfileAction(proto.Message): + r"""A task to execute when a data profile has been generated. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + export_data (google.cloud.dlp_v2.types.DataProfileAction.Export): + Export data profiles into a provided + location. + + This field is a member of `oneof`_ ``action``. + pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): + Publish a message into the Pub/Sub topic. + + This field is a member of `oneof`_ ``action``. + """ + class EventType(proto.Enum): + r"""Types of event that can trigger an action. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + Unused. + NEW_PROFILE (1): + New profile (not a re-profile). + CHANGED_PROFILE (2): + Changed one of the following profile metrics: + + - Table data risk score + - Table sensitivity score + - Table resource visibility + - Table encryption type + - Table predicted infoTypes + - Table other infoTypes + SCORE_INCREASED (3): + Table data risk score or sensitivity score + increased. + ERROR_CHANGED (4): + A user (non-internal) error occurred. + """ + EVENT_TYPE_UNSPECIFIED = 0 + NEW_PROFILE = 1 + CHANGED_PROFILE = 2 + SCORE_INCREASED = 3 + ERROR_CHANGED = 4 + + class Export(proto.Message): + r"""If set, the detailed data profiles will be persisted to the + location of your choice whenever updated. + + Attributes: + profile_table (google.cloud.dlp_v2.types.BigQueryTable): + Store all table and column profiles in an + existing table or a new table in an existing + dataset. Each re-generation will result in a new + row in BigQuery. + """ + + profile_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + message=storage.BigQueryTable, + ) + + class PubSubNotification(proto.Message): + r"""Send a Pub/Sub message into the given Pub/Sub topic to connect other + systems to data profile generation. The message payload data will be + the byte serialization of ``DataProfilePubSubMessage``. + + Attributes: + topic (str): + Cloud Pub/Sub topic to send notifications to. 
+ Format is projects/{project}/topics/{topic}. + event (google.cloud.dlp_v2.types.DataProfileAction.EventType): + The type of event that triggers a Pub/Sub. At most one + ``PubSubNotification`` per EventType is permitted. + pubsub_condition (google.cloud.dlp_v2.types.DataProfilePubSubCondition): + Conditions (e.g., data risk or sensitivity + level) for triggering a Pub/Sub. + detail_of_message (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification.DetailLevel): + How much data to include in the Pub/Sub message. If the user + wishes to limit the size of the message, they can use + resource_name and fetch the profile fields they wish to. Per + table profile (not per column). + """ + class DetailLevel(proto.Enum): + r"""The levels of detail that can be included in the Pub/Sub + message. + + Values: + DETAIL_LEVEL_UNSPECIFIED (0): + Unused. + TABLE_PROFILE (1): + The full table data profile. + RESOURCE_NAME (2): + The resource name of the table. + """ + DETAIL_LEVEL_UNSPECIFIED = 0 + TABLE_PROFILE = 1 + RESOURCE_NAME = 2 + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + event: 'DataProfileAction.EventType' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileAction.EventType', + ) + pubsub_condition: 'DataProfilePubSubCondition' = proto.Field( + proto.MESSAGE, + number=3, + message='DataProfilePubSubCondition', + ) + detail_of_message: 'DataProfileAction.PubSubNotification.DetailLevel' = proto.Field( + proto.ENUM, + number=4, + enum='DataProfileAction.PubSubNotification.DetailLevel', + ) + + export_data: Export = proto.Field( + proto.MESSAGE, + number=1, + oneof='action', + message=Export, + ) + pub_sub_notification: PubSubNotification = proto.Field( + proto.MESSAGE, + number=2, + oneof='action', + message=PubSubNotification, + ) + + +class DataProfileJobConfig(proto.Message): + r"""Configuration for setting up a job to scan resources for profile + generation. 
Only one data profile configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to the [data + retention policy] + (https://cloud.google.com/dlp/docs/data-profiles#retention). + + Attributes: + location (google.cloud.dlp_v2.types.DataProfileLocation): + The data to scan. + project_id (str): + The project that will run the scan. The DLP + service account that exists within this project + must have access to all resources that are + profiled, and the Cloud DLP API must be enabled. + inspect_templates (MutableSequence[str]): + Detection logic for profile generation. + + Not all template features are used by profiles. + FindingLimits, include_quote and exclude_info_types have no + impact on data profiling. + + Multiple templates may be provided if there is data in + multiple regions. At most one template must be specified + per-region (including "global"). Each region is scanned + using the applicable template. If no region-specific + template is specified, but a "global" template is specified, + it will be copied to that region and used instead. If no + global or region-specific template is provided for a region + with data, that region's data will not be scanned. + + For more information, see + https://cloud.google.com/dlp/docs/data-profiles#data_residency. + data_profile_actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): + Actions to execute at the completion of the + job. + """ + + location: 'DataProfileLocation' = proto.Field( + proto.MESSAGE, + number=1, + message='DataProfileLocation', + ) + project_id: str = proto.Field( + proto.STRING, + number=5, + ) + inspect_templates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + data_profile_actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='DataProfileAction', + ) + + +class DataProfileLocation(proto.Message): + r"""The data that will be profiled. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + organization_id (int): + The ID of an organization to scan. + + This field is a member of `oneof`_ ``location``. + folder_id (int): + The ID of the Folder within an organization + to scan. + + This field is a member of `oneof`_ ``location``. + """ + + organization_id: int = proto.Field( + proto.INT64, + number=1, + oneof='location', + ) + folder_id: int = proto.Field( + proto.INT64, + number=2, + oneof='location', + ) + + +class DlpJob(proto.Message): + r"""Combines all of the information about a DLP job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The server-assigned name. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. + state (google.cloud.dlp_v2.types.DlpJob.JobState): + State of a job. + risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): + Results from analyzing risk of a data source. + + This field is a member of `oneof`_ ``details``. + inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): + Results from inspecting a data source. + + This field is a member of `oneof`_ ``details``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job finished. 
+ job_trigger_name (str): + If created by a job trigger, the resource + name of the trigger that instantiated the job. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + A stream of errors encountered running the + job. + """ + class JobState(proto.Enum): + r"""Possible states of a job. New items may be added. + + Values: + JOB_STATE_UNSPECIFIED (0): + Unused. + PENDING (1): + The job has not yet started. + RUNNING (2): + The job is currently running. Once a job has + finished it will transition to FAILED or DONE. + DONE (3): + The job is no longer running. + CANCELED (4): + The job was canceled before it could be + completed. + FAILED (5): + The job had an error and did not complete. + ACTIVE (6): + The job is currently accepting findings via + hybridInspect. A hybrid job in ACTIVE state may + continue to have findings added to it through + the calling of hybridInspect. After the job has + finished no more calls to hybridInspect may be + made. ACTIVE jobs can transition to DONE. 
+ """ + JOB_STATE_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + DONE = 3 + CANCELED = 4 + FAILED = 5 + ACTIVE = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=2, + enum='DlpJobType', + ) + state: JobState = proto.Field( + proto.ENUM, + number=3, + enum=JobState, + ) + risk_details: 'AnalyzeDataSourceRiskDetails' = proto.Field( + proto.MESSAGE, + number=4, + oneof='details', + message='AnalyzeDataSourceRiskDetails', + ) + inspect_details: 'InspectDataSourceDetails' = proto.Field( + proto.MESSAGE, + number=5, + oneof='details', + message='InspectDataSourceDetails', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + job_trigger_name: str = proto.Field( + proto.STRING, + number=10, + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message='Error', + ) + + +class GetDlpJobRequest(proto.Message): + r"""The request message for [DlpJobs.GetDlpJob][]. + + Attributes: + name (str): + Required. The name of the DlpJob resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDlpJobsRequest(proto.Message): + r"""The request message for listing DLP jobs. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values for inspect jobs: + + - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED + - ``inspected_storage`` - + DATASTORE|CLOUD_STORAGE|BIGQUERY + - ``trigger_name`` - The name of the trigger that + created the job. + - 'end_time\` - Corresponds to the time the job + finished. + - 'start_time\` - Corresponds to the time the job + finished. + + - Supported fields for risk analysis jobs: + + - ``state`` - RUNNING|CANCELED|FINISHED|FAILED + - 'end_time\` - Corresponds to the time the job + finished. + - 'start_time\` - Corresponds to the time the job + finished. + + - The operator must be ``=`` or ``!=``. + + Examples: + + - inspected_storage = cloud_storage AND state = done + - inspected_storage = cloud_storage OR inspected_storage = + bigquery + - inspected_storage = cloud_storage AND (state = done OR + state = canceled) + - end_time > "2017-12-12T00:00:00+00:00" + + The length of this field should be no more than 500 + characters. + page_size (int): + The standard list page size. + page_token (str): + The standard list page token. 
+ type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. Defaults to ``DlpJobType.INSPECT`` + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, end_time asc, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the job was + created. + - ``end_time``: corresponds to the time the job ended. + - ``name``: corresponds to the job's name. + - ``state``: corresponds to ``state`` + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=5, + enum='DlpJobType', + ) + order_by: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListDlpJobsResponse(proto.Message): + r"""The response message for listing DLP jobs. + + Attributes: + jobs (MutableSequence[google.cloud.dlp_v2.types.DlpJob]): + A list of DlpJobs that matches the specified + filter in the request. + next_page_token (str): + The standard List next-page token. + """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence['DlpJob'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DlpJob', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelDlpJobRequest(proto.Message): + r"""The request message for canceling a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FinishDlpJobRequest(proto.Message): + r"""The request message for finishing a DLP hybrid job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteDlpJobRequest(proto.Message): + r"""The request message for deleting a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDeidentifyTemplateRequest(proto.Message): + r"""Request message for CreateDeidentifyTemplate. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Required. The DeidentifyTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. 
This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template: 'DeidentifyTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyTemplate', + ) + template_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateDeidentifyTemplateRequest(proto.Message): + r"""Request message for UpdateDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template: 'DeidentifyTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyTemplate', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetDeidentifyTemplateRequest(proto.Message): + r"""Request message for GetDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeidentifyTemplatesRequest(proto.Message): + r"""Request message for ListDeidentifyTemplates. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListDeidentifyTemplates``. + page_size (int): + Size of the page, can be limited by the + server. If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the template was + created. + - ``update_time``: corresponds to the time the template was + last updated. + - ``name``: corresponds to the template's name. + - ``display_name``: corresponds to the template's display + name. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDeidentifyTemplatesResponse(proto.Message): + r"""Response message for ListDeidentifyTemplates. + + Attributes: + deidentify_templates (MutableSequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): + List of deidentify templates, up to page_size in + ListDeidentifyTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListDeidentifyTemplates request. + """ + + @property + def raw_page(self): + return self + + deidentify_templates: MutableSequence['DeidentifyTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DeidentifyTemplate', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteDeidentifyTemplateRequest(proto.Message): + r"""Request message for DeleteDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LargeCustomDictionaryConfig(proto.Message): + r"""Configuration for a custom dictionary created from a data source of + any size up to the maximum size defined in the + `limits `__ page. The artifacts + of dictionary creation are stored in the specified Cloud Storage + location. Consider using ``CustomInfoType.Dictionary`` for smaller + dictionaries that satisfy the size requirements. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + output_path (google.cloud.dlp_v2.types.CloudStoragePath): + Location to store dictionary artifacts in + Cloud Storage. These files will only be + accessible by project owners and the DLP API. If + any of these artifacts are modified, the + dictionary is considered invalid and can no + longer be used. + cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): + Set of files containing newline-delimited + lists of dictionary phrases. + + This field is a member of `oneof`_ ``source``. + big_query_field (google.cloud.dlp_v2.types.BigQueryField): + Field in a BigQuery table where each cell + represents a dictionary phrase. + + This field is a member of `oneof`_ ``source``. + """ + + output_path: storage.CloudStoragePath = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CloudStoragePath, + ) + cloud_storage_file_set: storage.CloudStorageFileSet = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message=storage.CloudStorageFileSet, + ) + big_query_field: storage.BigQueryField = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message=storage.BigQueryField, + ) + + +class LargeCustomDictionaryStats(proto.Message): + r"""Summary statistics of a custom dictionary. + + Attributes: + approx_num_phrases (int): + Approximate number of distinct phrases in the + dictionary. + """ + + approx_num_phrases: int = proto.Field( + proto.INT64, + number=1, + ) + + +class StoredInfoTypeConfig(proto.Message): + r"""Configuration for stored infoTypes. All fields and subfield + are provided by the user. For more information, see + https://cloud.google.com/dlp/docs/creating-custom-infotypes. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + Display name of the StoredInfoType (max 256 + characters). + description (str): + Description of the StoredInfoType (max 256 + characters). + large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): + StoredInfoType where findings are defined by + a dictionary of phrases. + + This field is a member of `oneof`_ ``type``. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Store dictionary-based CustomInfoType. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Store regular expression-based + StoredInfoType. + + This field is a member of `oneof`_ ``type``. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + large_custom_dictionary: 'LargeCustomDictionaryConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='LargeCustomDictionaryConfig', + ) + dictionary: storage.CustomInfoType.Dictionary = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + + +class StoredInfoTypeStats(proto.Message): + r"""Statistics for a StoredInfoType. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): + StoredInfoType where findings are defined by + a dictionary of phrases. + + This field is a member of `oneof`_ ``type``. 
+ """ + + large_custom_dictionary: 'LargeCustomDictionaryStats' = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='LargeCustomDictionaryStats', + ) + + +class StoredInfoTypeVersion(proto.Message): + r"""Version of a StoredInfoType, including the configuration used + to build it, create timestamp, and current state. + + Attributes: + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + StoredInfoType configuration. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Create timestamp of the version. Read-only, + determined by the system when the version is + created. + state (google.cloud.dlp_v2.types.StoredInfoTypeState): + Stored info type version state. Read-only, + updated by the system during dictionary + creation. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Errors that occurred when creating this storedInfoType + version, or anomalies detected in the storedInfoType data + that render it unusable. Only the five most recent errors + will be displayed, with the most recent error appearing + first. + + For example, some of the data for stored custom dictionaries + is put in the user's Cloud Storage bucket, and if this data + is modified or deleted by the user or another system, the + dictionary becomes invalid. + + If any errors occur, fix the problem indicated by the error + message and use the UpdateStoredInfoType API method to + create another version of the storedInfoType to continue + using it, reusing the same ``config`` if it was not the + source of the error. + stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): + Statistics about this storedInfoType version. 
+ """ + + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='StoredInfoTypeConfig', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'StoredInfoTypeState' = proto.Field( + proto.ENUM, + number=3, + enum='StoredInfoTypeState', + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Error', + ) + stats: 'StoredInfoTypeStats' = proto.Field( + proto.MESSAGE, + number=5, + message='StoredInfoTypeStats', + ) + + +class StoredInfoType(proto.Message): + r"""StoredInfoType resource message that contains information + about the current version and any pending updates. + + Attributes: + name (str): + Resource name. + current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): + Current version of the stored info type. + pending_versions (MutableSequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): + Pending versions of the stored info type. + Empty if no versions are pending. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + current_version: 'StoredInfoTypeVersion' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeVersion', + ) + pending_versions: MutableSequence['StoredInfoTypeVersion'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StoredInfoTypeVersion', + ) + + +class CreateStoredInfoTypeRequest(proto.Message): + r"""Request message for CreateStoredInfoType. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Required. Configuration of the storedInfoType + to create. + stored_info_type_id (str): + The storedInfoType ID can contain uppercase and lowercase + letters, numbers, and hyphens; that is, it must match the + regular expression: ``[a-zA-Z\d-_]+``. The maximum length is + 100 characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + stored_info_type_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateStoredInfoTypeRequest(proto.Message): + r"""Request message for UpdateStoredInfoType. + + Attributes: + name (str): + Required. Resource name of organization and storedInfoType + to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. 
+ config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the storedInfoType. + If not provided, a new version of the + storedInfoType will be created with the existing + configuration. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetStoredInfoTypeRequest(proto.Message): + r"""Request message for GetStoredInfoType. + + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListStoredInfoTypesRequest(proto.Message): + r"""Request message for ListStoredInfoTypes. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListStoredInfoTypes``. + page_size (int): + Size of the page, can be limited by the + server. 
If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, display_name, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the most recent + version of the resource was created. + - ``state``: corresponds to the state of the resource. + - ``name``: corresponds to resource name. + - ``display_name``: corresponds to info type's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListStoredInfoTypesResponse(proto.Message): + r"""Response message for ListStoredInfoTypes. + + Attributes: + stored_info_types (MutableSequence[google.cloud.dlp_v2.types.StoredInfoType]): + List of storedInfoTypes, up to page_size in + ListStoredInfoTypesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListStoredInfoTypes request. + """ + + @property + def raw_page(self): + return self + + stored_info_types: MutableSequence['StoredInfoType'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='StoredInfoType', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteStoredInfoTypeRequest(proto.Message): + r"""Request message for DeleteStoredInfoType. + + Attributes: + name (str): + Required. 
Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class HybridInspectJobTriggerRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the trigger to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: 'HybridContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridInspectDlpJobRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: 'HybridContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridContentItem(proto.Message): + r"""An individual hybrid item to inspect. Will be stored + temporarily during processing. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): + Supplementary information that will be added + to each finding. 
+ """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + finding_details: 'HybridFindingDetails' = proto.Field( + proto.MESSAGE, + number=2, + message='HybridFindingDetails', + ) + + +class HybridFindingDetails(proto.Message): + r"""Populate to associate additional data with each finding. + + Attributes: + container_details (google.cloud.dlp_v2.types.Container): + Details about the container where the content + being inspected is from. + file_offset (int): + Offset in bytes of the line, from the + beginning of the file, where the finding is + located. Populate if the item being scanned is + only part of a bigger item, such as a shard of a + file and you want to track the absolute position + of the finding. + row_offset (int): + Offset of the row for tables. Populate if the + row(s) being scanned are part of a bigger + dataset and you want to keep track of their + absolute position. + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional information to make + findings meaningful such as the columns that are primary + keys. If not known ahead of time, can also be set within + each inspect hybrid call and the two will be merged. Note + that identifying_fields will only be stored to BigQuery, and + only if the BigQuery action has been included. + labels (MutableMapping[str, str]): + Labels to represent user provided metadata about the data + being inspected. If configured by the job, some key values + may be required. The labels associated with ``Finding``'s + produced by hybrid inspection. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. 
+ + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + """ + + container_details: 'Container' = proto.Field( + proto.MESSAGE, + number=1, + message='Container', + ) + file_offset: int = proto.Field( + proto.INT64, + number=2, + ) + row_offset: int = proto.Field( + proto.INT64, + number=3, + ) + table_options: storage.TableOptions = proto.Field( + proto.MESSAGE, + number=4, + message=storage.TableOptions, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class HybridInspectResponse(proto.Message): + r"""Quota exceeded errors will be thrown once quota has been met. + """ + + +class DataRiskLevel(proto.Message): + r"""Score is a summary of all elements in the data profile. + A higher number means more risk. + + Attributes: + score (google.cloud.dlp_v2.types.DataRiskLevel.DataRiskLevelScore): + The score applied to the resource. + """ + class DataRiskLevelScore(proto.Enum): + r"""Various score levels for resources. + + Values: + RISK_SCORE_UNSPECIFIED (0): + Unused. + RISK_LOW (10): + Low risk - Lower indication of sensitive data + that appears to have additional access + restrictions in place or no indication of + sensitive data found. + RISK_MODERATE (20): + Medium risk - Sensitive data may be present + but additional access or fine grain access + restrictions appear to be present. Consider + limiting access even further or transform data + to mask. + RISK_HIGH (30): + High risk – SPII may be present. Access + controls may include public ACLs. Exfiltration + of data may lead to user data loss. + Re-identification of users may be possible. + Consider limiting usage and or removing SPII. 
+ """ + RISK_SCORE_UNSPECIFIED = 0 + RISK_LOW = 10 + RISK_MODERATE = 20 + RISK_HIGH = 30 + + score: DataRiskLevelScore = proto.Field( + proto.ENUM, + number=1, + enum=DataRiskLevelScore, + ) + + +class DataProfileConfigSnapshot(proto.Message): + r"""Snapshot of the configurations used to generate the profile. + + Attributes: + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + A copy of the inspection config used to generate this + profile. This is a copy of the inspect_template specified in + ``DataProfileJobConfig``. + data_profile_job (google.cloud.dlp_v2.types.DataProfileJobConfig): + A copy of the configuration used to generate + this profile. + """ + + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + data_profile_job: 'DataProfileJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='DataProfileJobConfig', + ) + + +class TableDataProfile(proto.Message): + r"""The profile for a scanned table. + + Attributes: + name (str): + The name of the profile. + project_data_profile (str): + The resource name to the project data profile + for this table. + dataset_project_id (str): + The GCP project ID that owns the BigQuery + dataset. + dataset_location (str): + The BigQuery location where the dataset's + data is stored. See + https://cloud.google.com/bigquery/docs/locations + for supported locations. + dataset_id (str): + The BigQuery dataset ID. + table_id (str): + The BigQuery table ID. + full_resource (str): + The resource name of the table. + https://cloud.google.com/apis/design/resource_names#full_resource_name + profile_status (google.cloud.dlp_v2.types.ProfileStatus): + Success or error status from the most recent + profile generation attempt. May be empty if the + profile is still being generated. + state (google.cloud.dlp_v2.types.TableDataProfile.State): + State of a profile. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The sensitivity score of this table. 
+ data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): + The data risk level of this table. + predicted_info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSummary]): + The infoTypes predicted from this table's + data. + other_info_types (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): + Other infoTypes found in this table's data. + config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): + The snapshot of the configurations used to + generate the profile. + last_modified_time (google.protobuf.timestamp_pb2.Timestamp): + The time when this table was last modified + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when this table expires. + scanned_column_count (int): + The number of columns profiled in the table. + failed_column_count (int): + The number of columns skipped in the table + because of an error. + table_size_bytes (int): + The size of the table when the profile was + generated. + row_count (int): + Number of rows in the table when the profile + was generated. This will not be populated for + BigLake tables. + encryption_status (google.cloud.dlp_v2.types.EncryptionStatus): + How the table is encrypted. + resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): + How broadly a resource has been shared. + profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): + The last time the profile was generated. + resource_labels (MutableMapping[str, str]): + The labels applied to the resource at the + time the profile was generated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the table was created. + """ + class State(proto.Enum): + r"""Possible states of a profile. New items may be added. + + Values: + STATE_UNSPECIFIED (0): + Unused. + RUNNING (1): + The profile is currently running. Once a + profile has finished it will transition to DONE. + DONE (2): + The profile is no longer generating. 
If + profile_status.status.code is 0, the profile succeeded, + otherwise, it failed. + """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + DONE = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + project_data_profile: str = proto.Field( + proto.STRING, + number=2, + ) + dataset_project_id: str = proto.Field( + proto.STRING, + number=24, + ) + dataset_location: str = proto.Field( + proto.STRING, + number=29, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=25, + ) + table_id: str = proto.Field( + proto.STRING, + number=26, + ) + full_resource: str = proto.Field( + proto.STRING, + number=3, + ) + profile_status: 'ProfileStatus' = proto.Field( + proto.MESSAGE, + number=21, + message='ProfileStatus', + ) + state: State = proto.Field( + proto.ENUM, + number=22, + enum=State, + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=5, + message=storage.SensitivityScore, + ) + data_risk_level: 'DataRiskLevel' = proto.Field( + proto.MESSAGE, + number=6, + message='DataRiskLevel', + ) + predicted_info_types: MutableSequence['InfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message='InfoTypeSummary', + ) + other_info_types: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=28, + message='OtherInfoTypeSummary', + ) + config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( + proto.MESSAGE, + number=7, + message='DataProfileConfigSnapshot', + ) + last_modified_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + scanned_column_count: int = proto.Field( + proto.INT64, + number=10, + ) + failed_column_count: int = proto.Field( + proto.INT64, + number=11, + ) + table_size_bytes: int = proto.Field( + proto.INT64, + number=12, + ) + row_count: int = proto.Field( + proto.INT64, + 
number=13, + ) + encryption_status: 'EncryptionStatus' = proto.Field( + proto.ENUM, + number=14, + enum='EncryptionStatus', + ) + resource_visibility: 'ResourceVisibility' = proto.Field( + proto.ENUM, + number=15, + enum='ResourceVisibility', + ) + profile_last_generated: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=17, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=23, + message=timestamp_pb2.Timestamp, + ) + + +class ProfileStatus(proto.Message): + r""" + + Attributes: + status (google.rpc.status_pb2.Status): + Profiling status code and optional message + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Time when the profile generation status was + updated + """ + + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class InfoTypeSummary(proto.Message): + r"""The infoType details for this column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The infoType. + estimated_prevalence (int): + Not populated for predicted infotypes. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + + +class OtherInfoTypeSummary(proto.Message): + r"""Infotype details for other infoTypes found within a column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The other infoType. + estimated_prevalence (int): + Approximate percentage of non-null rows that + contained data detected by this infotype. 
+ """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + + +class DataProfilePubSubCondition(proto.Message): + r"""A condition for determining whether a Pub/Sub should be + triggered. + + Attributes: + expressions (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions): + An expression. + """ + class ProfileScoreBucket(proto.Enum): + r"""Various score levels for resources. + + Values: + PROFILE_SCORE_BUCKET_UNSPECIFIED (0): + Unused. + HIGH (1): + High risk/sensitivity detected. + MEDIUM_OR_HIGH (2): + Medium or high risk/sensitivity detected. + """ + PROFILE_SCORE_BUCKET_UNSPECIFIED = 0 + HIGH = 1 + MEDIUM_OR_HIGH = 2 + + class PubSubCondition(proto.Message): + r"""A condition consisting of a value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + minimum_risk_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum data risk score that triggers the + condition. + + This field is a member of `oneof`_ ``value``. + minimum_sensitivity_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum sensitivity level that triggers + the condition. + + This field is a member of `oneof`_ ``value``. 
+ """ + + minimum_risk_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( + proto.ENUM, + number=1, + oneof='value', + enum='DataProfilePubSubCondition.ProfileScoreBucket', + ) + minimum_sensitivity_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( + proto.ENUM, + number=2, + oneof='value', + enum='DataProfilePubSubCondition.ProfileScoreBucket', + ) + + class PubSubExpressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + Attributes: + logical_operator (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator): + The operator to apply to the collection of + conditions. + conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubCondition]): + Conditions to apply to the expression. + """ + class PubSubLogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused. + OR (1): + Conditional OR. + AND (2): + Conditional AND. + """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + OR = 1 + AND = 2 + + logical_operator: 'DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator', + ) + conditions: MutableSequence['DataProfilePubSubCondition.PubSubCondition'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='DataProfilePubSubCondition.PubSubCondition', + ) + + expressions: PubSubExpressions = proto.Field( + proto.MESSAGE, + number=1, + message=PubSubExpressions, + ) + + +class DataProfilePubSubMessage(proto.Message): + r"""Pub/Sub topic message for a + DataProfileAction.PubSubNotification event. To receive a message + of protocol buffer schema type, convert the message data to an + object of this proto class. 
+ + Attributes: + profile (google.cloud.dlp_v2.types.TableDataProfile): + If ``DetailLevel`` is ``TABLE_PROFILE`` this will be fully + populated. Otherwise, if ``DetailLevel`` is + ``RESOURCE_NAME``, then only ``name`` and ``full_resource`` + will be populated. + event (google.cloud.dlp_v2.types.DataProfileAction.EventType): + The event that caused the Pub/Sub message to + be sent. + """ + + profile: 'TableDataProfile' = proto.Field( + proto.MESSAGE, + number=1, + message='TableDataProfile', + ) + event: 'DataProfileAction.EventType' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileAction.EventType', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py new file mode 100644 index 00000000..fdb81846 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py @@ -0,0 +1,1474 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'Likelihood', + 'FileType', + 'InfoType', + 'SensitivityScore', + 'StoredType', + 'CustomInfoType', + 'FieldId', + 'PartitionId', + 'KindExpression', + 'DatastoreOptions', + 'CloudStorageRegexFileSet', + 'CloudStorageOptions', + 'CloudStorageFileSet', + 'CloudStoragePath', + 'BigQueryOptions', + 'StorageConfig', + 'HybridOptions', + 'BigQueryKey', + 'DatastoreKey', + 'Key', + 'RecordKey', + 'BigQueryTable', + 'BigQueryField', + 'EntityId', + 'TableOptions', + }, +) + + +class Likelihood(proto.Enum): + r"""Categorization of results based on how likely they are to + represent a match, based on the number of elements they contain + which imply a match. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Default value; same as POSSIBLE. + VERY_UNLIKELY (1): + Few matching elements. + UNLIKELY (2): + No description available. + POSSIBLE (3): + Some matching elements. + LIKELY (4): + No description available. + VERY_LIKELY (5): + Many matching elements. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class FileType(proto.Enum): + r"""Definitions of file type groups to scan. New types will be + added to this list. + + Values: + FILE_TYPE_UNSPECIFIED (0): + Includes all files. + BINARY_FILE (1): + Includes all file extensions not covered by another entry. + Binary scanning attempts to convert the content of the file + to utf_8 to scan the file. If you wish to avoid this fall + back, specify one or more of the other FileType's in your + storage scan. 
+ TEXT_FILE (2): + Included file extensions: + asc,asp, aspx, brf, c, cc,cfm, cgi, cpp, csv, + cxx, c++, cs, css, dart, dat, dot, eml,, + epbub, ged, go, h, hh, hpp, hxx, h++, hs, html, + htm, mkd, markdown, m, ml, mli, perl, pl, + plist, pm, php, phtml, pht, properties, py, + pyw, rb, rbw, rs, rss, rc, scala, sh, sql, + swift, tex, shtml, shtm, xhtml, lhs, ics, ini, + java, js, json, kix, kml, ocaml, md, txt, + text, tsv, vb, vcard, vcs, wml, xcodeproj, xml, + xsl, xsd, yml, yaml. + IMAGE (3): + Included file extensions: bmp, gif, jpg, jpeg, jpe, png. + bytes_limit_per_file has no effect on image files. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + WORD (5): + Word files >30 MB will be scanned as binary + files. Included file extensions: + docx, dotx, docm, dotm + PDF (6): + PDF files >30 MB will be scanned as binary + files. Included file extensions: + pdf + AVRO (7): + Included file extensions: + avro + CSV (8): + Included file extensions: + csv + TSV (9): + Included file extensions: + tsv + POWERPOINT (11): + Powerpoint files >30 MB will be scanned as + binary files. Included file extensions: + pptx, pptm, potx, potm, pot + EXCEL (12): + Excel files >30 MB will be scanned as binary + files. Included file extensions: + xlsx, xlsm, xltx, xltm + """ + FILE_TYPE_UNSPECIFIED = 0 + BINARY_FILE = 1 + TEXT_FILE = 2 + IMAGE = 3 + WORD = 5 + PDF = 6 + AVRO = 7 + CSV = 8 + TSV = 9 + POWERPOINT = 11 + EXCEL = 12 + + +class InfoType(proto.Message): + r"""Type of information detected by the API. + + Attributes: + name (str): + Name of the information type. Either a name of your choosing + when creating a CustomInfoType, or one of the names listed + at https://cloud.google.com/dlp/docs/infotypes-reference + when specifying a built-in type. When sending Cloud DLP + results to Data Catalog, infoType names should conform to + the pattern ``[A-Za-z0-9$_-]{1,64}``. + version (str): + Optional version name for this InfoType. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SensitivityScore(proto.Message): + r"""Score is a summary of all elements in the data profile. + A higher number means more sensitive. + + Attributes: + score (google.cloud.dlp_v2.types.SensitivityScore.SensitivityScoreLevel): + The score applied to the resource. + """ + class SensitivityScoreLevel(proto.Enum): + r"""Various score levels for resources. + + Values: + SENSITIVITY_SCORE_UNSPECIFIED (0): + Unused. + SENSITIVITY_LOW (10): + No sensitive information detected. Limited + access. + SENSITIVITY_MODERATE (20): + Medium risk - PII, potentially sensitive + data, or fields with free-text data that are at + higher risk of having intermittent sensitive + data. Consider limiting access. + SENSITIVITY_HIGH (30): + High risk – SPII may be present. Exfiltration + of data may lead to user data loss. + Re-identification of users may be possible. + Consider limiting usage and or removing SPII. + """ + SENSITIVITY_SCORE_UNSPECIFIED = 0 + SENSITIVITY_LOW = 10 + SENSITIVITY_MODERATE = 20 + SENSITIVITY_HIGH = 30 + + score: SensitivityScoreLevel = proto.Field( + proto.ENUM, + number=1, + enum=SensitivityScoreLevel, + ) + + +class StoredType(proto.Message): + r"""A reference to a StoredInfoType to use with scanning. + + Attributes: + name (str): + Resource name of the requested ``StoredInfoType``, for + example + ``organizations/433245324/storedInfoTypes/432452342`` or + ``projects/project-id/storedInfoTypes/432452342``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp indicating when the version of the + ``StoredInfoType`` used for inspection was created. + Output-only field, populated by the system. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class CustomInfoType(proto.Message): + r"""Custom information type provided by the user. Used to find + domain-specific sensitive information configurable to the data + in question. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + CustomInfoType can either be a new infoType, or an extension + of built-in infoType, when the name matches one of existing + infoTypes and that infoType is specified in + ``InspectContent.info_types`` field. Specifying the latter + adds findings to the one detected by the system. If built-in + info type is not specified in ``InspectContent.info_types`` + list then the name is treated as a custom info type. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Likelihood to return for this CustomInfoType. This base + value can be altered by a detection rule if the finding + meets the criteria specified by the rule. Defaults to + ``VERY_LIKELY`` if not specified. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + A list of phrases to detect as a + CustomInfoType. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression based CustomInfoType. + + This field is a member of `oneof`_ ``type``. + surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): + Message for detecting output from + deidentification transformations that support + reversing. + + This field is a member of `oneof`_ ``type``. 
+ stored_type (google.cloud.dlp_v2.types.StoredType): + Load an existing ``StoredInfoType`` resource for use in + ``InspectDataSource``. Not currently supported in + ``InspectContent``. + + This field is a member of `oneof`_ ``type``. + detection_rules (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): + Set of detection rules to apply to all findings of this + CustomInfoType. Rules are applied in order that they are + specified. Not supported for the ``surrogate_type`` + CustomInfoType. + exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): + If set to EXCLUSION_TYPE_EXCLUDE this infoType will not + cause a finding to be returned. It still can be used for + rules matching. + """ + class ExclusionType(proto.Enum): + r""" + + Values: + EXCLUSION_TYPE_UNSPECIFIED (0): + A finding of this custom info type will not + be excluded from results. + EXCLUSION_TYPE_EXCLUDE (1): + A finding of this custom info type will be + excluded from final results, but can still + affect rule execution. + """ + EXCLUSION_TYPE_UNSPECIFIED = 0 + EXCLUSION_TYPE_EXCLUDE = 1 + + class Dictionary(proto.Message): + r"""Custom information type based on a dictionary of words or phrases. + This can be used to match sensitive information specific to the + data, such as a list of employee IDs or job titles. + + Dictionary words are case-insensitive and all characters other than + letters and digits in the unicode `Basic Multilingual + Plane `__ + will be replaced with whitespace when scanning for matches, so the + dictionary phrase "Sam Johnson" will match all three phrases "sam + johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the + characters surrounding any match must be of a different type than + the adjacent characters within the word, so letters must be next to + non-letters and digits next to non-digits. 
For example, the + dictionary word "jen" will match the first three letters of the text + "jen123" but will return no matches for "jennifer". + + Dictionary words containing a large number of characters that are + not letters or digits may result in unexpected findings because such + characters are treated as whitespace. The + `limits `__ page contains + details about the size limits of dictionaries. For dictionaries that + do not fit within these constraints, consider using + ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): + List of words or phrases to search for. + + This field is a member of `oneof`_ ``source``. + cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): + Newline-delimited file of words in Cloud + Storage. Only a single file is accepted. + + This field is a member of `oneof`_ ``source``. + """ + + class WordList(proto.Message): + r"""Message defining a list of words or phrases to search for in + the data. + + Attributes: + words (MutableSequence[str]): + Words or phrases defining the dictionary. The dictionary + must contain at least one phrase and every phrase must + contain at least 2 characters that are letters or digits. 
+ [required] + """ + + words: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + word_list: 'CustomInfoType.Dictionary.WordList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='source', + message='CustomInfoType.Dictionary.WordList', + ) + cloud_storage_path: 'CloudStoragePath' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='CloudStoragePath', + ) + + class Regex(proto.Message): + r"""Message defining a custom regular expression. + + Attributes: + pattern (str): + Pattern defining the regular expression. Its + syntax + (https://github.com/google/re2/wiki/Syntax) can + be found under the google/re2 repository on + GitHub. + group_indexes (MutableSequence[int]): + The index of the submatch to extract as + findings. When not specified, the entire match + is returned. No more than 3 may be included. + """ + + pattern: str = proto.Field( + proto.STRING, + number=1, + ) + group_indexes: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + + class SurrogateType(proto.Message): + r"""Message for detecting output from deidentification transformations + such as + ```CryptoReplaceFfxFpeConfig`` `__. + These types of transformations are those that perform + pseudonymization, thereby producing a "surrogate" as output. This + should be used in conjunction with a field on the transformation + such as ``surrogate_info_type``. This CustomInfoType does not + support the use of ``detection_rules``. + + """ + + class DetectionRule(proto.Message): + r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a + ``CustomInfoType`` to alter behavior under certain circumstances, + depending on the specific details of the rule. Not supported for the + ``surrogate_type`` custom infoType. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + + This field is a member of `oneof`_ ``type``. + """ + + class Proximity(proto.Message): + r"""Message for specifying a window around a finding to apply a + detection rule. + + Attributes: + window_before (int): + Number of characters before the finding to consider. For + tabular data, if you want to modify the likelihood of an + entire column of findngs, set this to 1. For more + information, see [Hotword example: Set the match likelihood + of a table column] + (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). + window_after (int): + Number of characters after the finding to + consider. + """ + + window_before: int = proto.Field( + proto.INT32, + number=1, + ) + window_after: int = proto.Field( + proto.INT32, + number=2, + ) + + class LikelihoodAdjustment(proto.Message): + r"""Message for specifying an adjustment to the likelihood of a + finding as part of a detection rule. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed_likelihood (google.cloud.dlp_v2.types.Likelihood): + Set the likelihood of a finding to a fixed + value. + + This field is a member of `oneof`_ ``adjustment``. + relative_likelihood (int): + Increase or decrease the likelihood by the specified number + of levels. For example, if a finding would be ``POSSIBLE`` + without the detection rule and ``relative_likelihood`` is 1, + then it is upgraded to ``LIKELY``, while a value of -1 would + downgrade it to ``UNLIKELY``. 
Likelihood may never drop
+                    below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so
+                    applying an adjustment of 1 followed by an adjustment of -1
+                    when base likelihood is ``VERY_LIKELY`` will result in a
+                    final likelihood of ``LIKELY``.
+
+                    This field is a member of `oneof`_ ``adjustment``.
+            """
+
+            fixed_likelihood: 'Likelihood' = proto.Field(
+                proto.ENUM,
+                number=1,
+                oneof='adjustment',
+                enum='Likelihood',
+            )
+            relative_likelihood: int = proto.Field(
+                proto.INT32,
+                number=2,
+                oneof='adjustment',
+            )
+
+        class HotwordRule(proto.Message):
+            r"""The rule that adjusts the likelihood of findings within a
+            certain proximity of hotwords.
+
+            Attributes:
+                hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex):
+                    Regular expression pattern defining what
+                    qualifies as a hotword.
+                proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity):
+                    Range of characters within which the entire hotword must
+                    reside. The total length of the window cannot exceed 1000
+                    characters. The finding itself will be included in the
+                    window, so that hotwords can be used to match substrings of
+                    the finding itself. Suppose you want Cloud DLP to promote
+                    the likelihood of the phone number regex "(\d{3})
+                    \\d{3}-\d{4}" if the area code is known to be the area code
+                    of a company's office. In this case, use the hotword regex
+                    "(xxx)", where "xxx" is the area code in question.
+
+                    For tabular data, if you want to modify the likelihood of an
+                    entire column of findings, see [Hotword example: Set the
+                    match likelihood of a table column]
+                    (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values).
+                likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment):
+                    Likelihood adjustment to apply to all
+                    matching findings.
+ """ + + hotword_regex: 'CustomInfoType.Regex' = proto.Field( + proto.MESSAGE, + number=1, + message='CustomInfoType.Regex', + ) + proximity: 'CustomInfoType.DetectionRule.Proximity' = proto.Field( + proto.MESSAGE, + number=2, + message='CustomInfoType.DetectionRule.Proximity', + ) + likelihood_adjustment: 'CustomInfoType.DetectionRule.LikelihoodAdjustment' = proto.Field( + proto.MESSAGE, + number=3, + message='CustomInfoType.DetectionRule.LikelihoodAdjustment', + ) + + hotword_rule: 'CustomInfoType.DetectionRule.HotwordRule' = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='CustomInfoType.DetectionRule.HotwordRule', + ) + + info_type: 'InfoType' = proto.Field( + proto.MESSAGE, + number=1, + message='InfoType', + ) + likelihood: 'Likelihood' = proto.Field( + proto.ENUM, + number=6, + enum='Likelihood', + ) + dictionary: Dictionary = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=Dictionary, + ) + regex: Regex = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message=Regex, + ) + surrogate_type: SurrogateType = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=SurrogateType, + ) + stored_type: 'StoredType' = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message='StoredType', + ) + detection_rules: MutableSequence[DetectionRule] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=DetectionRule, + ) + exclusion_type: ExclusionType = proto.Field( + proto.ENUM, + number=8, + enum=ExclusionType, + ) + + +class FieldId(proto.Message): + r"""General identifier of a data field in a storage service. + + Attributes: + name (str): + Name describing the field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PartitionId(proto.Message): + r"""Datastore partition ID. + A partition ID identifies a grouping of entities. The grouping + is always by project and namespace, however the namespace ID may + be empty. 
+ A partition ID contains several dimensions: + project ID and namespace ID. + + Attributes: + project_id (str): + The ID of the project to which the entities + belong. + namespace_id (str): + If not empty, the ID of the namespace to + which the entities belong. + """ + + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + namespace_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class KindExpression(proto.Message): + r"""A representation of a Datastore kind. + + Attributes: + name (str): + The name of the kind. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DatastoreOptions(proto.Message): + r"""Options defining a data set within Google Cloud Datastore. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + A partition ID identifies a grouping of + entities. The grouping is always by project and + namespace, however the namespace ID may be + empty. + kind (google.cloud.dlp_v2.types.KindExpression): + The kind to process. + """ + + partition_id: 'PartitionId' = proto.Field( + proto.MESSAGE, + number=1, + message='PartitionId', + ) + kind: 'KindExpression' = proto.Field( + proto.MESSAGE, + number=2, + message='KindExpression', + ) + + +class CloudStorageRegexFileSet(proto.Message): + r"""Message representing a set of files in a Cloud Storage bucket. + Regular expressions are used to allow fine-grained control over + which files in the bucket to include. + + Included files are those that match at least one item in + ``include_regex`` and do not match any items in ``exclude_regex``. + Note that a file that matches items from both lists will *not* be + included. For a match to occur, the entire file path (i.e., + everything in the url after the bucket name) must match the regular + expression. 
+ + For example, given the input + ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: + + - ``gs://mybucket/directory1/myfile`` will be included + - ``gs://mybucket/directory1/directory2/myfile`` will be included + (``.*`` matches across ``/``) + - ``gs://mybucket/directory0/directory1/myfile`` will *not* be + included (the full path doesn't match any items in + ``include_regex``) + - ``gs://mybucket/directory1/excludedfile`` will *not* be included + (the path matches an item in ``exclude_regex``) + + If ``include_regex`` is left empty, it will match all files by + default (this is equivalent to setting ``include_regex: [".*"]``). + + Some other common use cases: + + - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will + include all files in ``mybucket`` except for .pdf files + - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` + will include all files directly under + ``gs://mybucket/directory/``, without matching across ``/`` + + Attributes: + bucket_name (str): + The name of a Cloud Storage bucket. Required. + include_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + include. All files in the bucket that match at least one of + these regular expressions will be included in the set of + files, except for those that also match an item in + ``exclude_regex``. Leaving this field empty will match all + files by default (this is equivalent to including ``.*`` in + the list). + + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. + exclude_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + exclude. All files in the bucket that match at least one of + these regular expressions will be excluded from the scan. + + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. 
+ """ + + bucket_name: str = proto.Field( + proto.STRING, + number=1, + ) + include_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclude_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CloudStorageOptions(proto.Message): + r"""Options defining a file or a set of files within a Cloud + Storage bucket. + + Attributes: + file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): + The set of one or more files to scan. + bytes_limit_per_file (int): + Max number of bytes to scan from a file. If a scanned file's + size is bigger than this value then the rest of the bytes + are omitted. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. Cannot be set + if de-identification is requested. + bytes_limit_per_file_percent (int): + Max percentage of bytes to scan from a file. The rest are + omitted. The number of bytes scanned is rounded down. Must + be between 0 and 100, inclusively. Both 0 and 100 means no + limit. Defaults to 0. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. Cannot be set + if de-identification is requested. + file_types (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of file type groups to include in the scan. If empty, + all files are scanned and available data format processors + are applied. In addition, the binary content of the selected + files is always scanned as well. Images are scanned only as + binary if the specified region does not support image + inspection and no file_types were specified. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): + + files_limit_percent (int): + Limits the number of files to scan to this + percentage of the input FileSet. Number of files + scanned is rounded down. Must be between 0 and + 100, inclusively. Both 0 and 100 means no limit. 
+ Defaults to 0. + """ + class SampleMethod(proto.Enum): + r"""How to sample bytes if not all bytes are scanned. Meaningful only + when used in conjunction with bytes_limit_per_file. If not + specified, scanning would start from the top. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No description available. + TOP (1): + Scan from the top (default). + RANDOM_START (2): + For each file larger than bytes_limit_per_file, randomly + pick the offset to start scanning. The scanned bytes are + contiguous. + """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + class FileSet(proto.Message): + r"""Set of files to scan. + + Attributes: + url (str): + The Cloud Storage url of the file(s) to scan, in the format + ``gs:///``. Trailing wildcard in the path is + allowed. + + If the url ends in a trailing slash, the bucket or directory + represented by the url will be scanned non-recursively + (content in sub-directories will not be scanned). This means + that ``gs://mybucket/`` is equivalent to + ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is + equivalent to ``gs://mybucket/directory/*``. + + Exactly one of ``url`` or ``regex_file_set`` must be set. + regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): + The regex-filtered set of files to scan. Exactly one of + ``url`` or ``regex_file_set`` must be set. 
+ """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + regex_file_set: 'CloudStorageRegexFileSet' = proto.Field( + proto.MESSAGE, + number=2, + message='CloudStorageRegexFileSet', + ) + + file_set: FileSet = proto.Field( + proto.MESSAGE, + number=1, + message=FileSet, + ) + bytes_limit_per_file: int = proto.Field( + proto.INT64, + number=4, + ) + bytes_limit_per_file_percent: int = proto.Field( + proto.INT32, + number=8, + ) + file_types: MutableSequence['FileType'] = proto.RepeatedField( + proto.ENUM, + number=5, + enum='FileType', + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=6, + enum=SampleMethod, + ) + files_limit_percent: int = proto.Field( + proto.INT32, + number=7, + ) + + +class CloudStorageFileSet(proto.Message): + r"""Message representing a set of files in Cloud Storage. + + Attributes: + url (str): + The url, in the format ``gs:///``. Trailing + wildcard in the path is allowed. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CloudStoragePath(proto.Message): + r"""Message representing a single file or path in Cloud Storage. + + Attributes: + path (str): + A url representing a file or path (no wildcards) in Cloud + Storage. Example: gs://[BUCKET_NAME]/dictionary.txt + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BigQueryOptions(proto.Message): + r"""Options defining BigQuery table and row identifiers. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Table fields that may uniquely identify a row within the + table. When ``actions.saveFindings.outputConfig.table`` is + specified, the values of columns specified here are + available in the output table under + ``location.content_locations.record_location.record_key.id_values``. + Nested fields such as ``person.birthdate.year`` are allowed. 
+ rows_limit (int): + Max number of rows to scan. If the table has more rows than + this value, the rest of the rows are omitted. If not set, or + if set to 0, all rows will be scanned. Only one of + rows_limit and rows_limit_percent can be specified. Cannot + be used in conjunction with TimespanConfig. + rows_limit_percent (int): + Max percentage of rows to scan. The rest are omitted. The + number of rows scanned is rounded down. Must be between 0 + and 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. Only one of rows_limit and rows_limit_percent + can be specified. Cannot be used in conjunction with + TimespanConfig. + sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): + + excluded_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + References to fields excluded from scanning. + This allows you to skip inspection of entire + columns which you know have no findings. + included_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Limit scanning only to these fields. + """ + class SampleMethod(proto.Enum): + r"""How to sample rows if not all rows are scanned. Meaningful only when + used in conjunction with either rows_limit or rows_limit_percent. If + not specified, rows are scanned in the order BigQuery reads them. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No description available. + TOP (1): + Scan groups of rows in the order BigQuery + provides (default). Multiple groups of rows may + be scanned in parallel, so results may not + appear in the same order the rows are read. + RANDOM_START (2): + Randomly pick groups of rows to scan. 
+ """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + table_reference: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='FieldId', + ) + rows_limit: int = proto.Field( + proto.INT64, + number=3, + ) + rows_limit_percent: int = proto.Field( + proto.INT32, + number=6, + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=4, + enum=SampleMethod, + ) + excluded_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='FieldId', + ) + included_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='FieldId', + ) + + +class StorageConfig(proto.Message): + r"""Shared message indicating Cloud storage type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): + Google Cloud Datastore options. + + This field is a member of `oneof`_ ``type``. + cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): + Cloud Storage options. + + This field is a member of `oneof`_ ``type``. + big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): + BigQuery options. + + This field is a member of `oneof`_ ``type``. + hybrid_options (google.cloud.dlp_v2.types.HybridOptions): + Hybrid inspection options. + + This field is a member of `oneof`_ ``type``. + timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): + + """ + + class TimespanConfig(proto.Message): + r"""Configuration of the timespan of the items to include in + scanning. 
Currently only supported when inspecting Cloud Storage + and BigQuery. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows older than + this value. If not set, no lower time limit is + applied. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows newer than + this value. If not set, no upper time limit is + applied. + timestamp_field (google.cloud.dlp_v2.types.FieldId): + Specification of the field containing the timestamp of + scanned items. Used for data sources like Datastore and + BigQuery. + + For BigQuery + + If this value is not specified and the table was modified + between the given start and end times, the entire table will + be scanned. If this value is specified, then rows are + filtered based on the given start and end times. Rows with a + ``NULL`` value in the provided BigQuery column are skipped. + Valid data types of the provided BigQuery column are: + ``INTEGER``, ``DATE``, ``TIMESTAMP``, and ``DATETIME``. + + If your BigQuery table is `partitioned at ingestion + time `__, + you can use any of the following pseudo-columns as your + timestamp field. When used with Cloud DLP, these + pseudo-column names are case sensitive. + + .. raw:: html + +
    +
  • _PARTITIONTIME
  • +
  • _PARTITIONDATE
  • +
  • _PARTITION_LOAD_TIME
  • +
+ + For Datastore + + If this value is specified, then entities are filtered based + on the given start and end times. If an entity does not + contain the provided timestamp property or contains empty or + invalid values, then it is included. Valid data types of the + provided timestamp property are: ``TIMESTAMP``. + + See the `known + issue `__ + related to this operation. + enable_auto_population_of_timespan_config (bool): + When the job is started by a JobTrigger we will + automatically figure out a valid start_time to avoid + scanning files that have not been modified since the last + time the JobTrigger executed. This will be based on the time + of the execution of the last run of the JobTrigger or the + timespan end_time used in the last run of the JobTrigger. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + timestamp_field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=3, + message='FieldId', + ) + enable_auto_population_of_timespan_config: bool = proto.Field( + proto.BOOL, + number=4, + ) + + datastore_options: 'DatastoreOptions' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='DatastoreOptions', + ) + cloud_storage_options: 'CloudStorageOptions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='CloudStorageOptions', + ) + big_query_options: 'BigQueryOptions' = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message='BigQueryOptions', + ) + hybrid_options: 'HybridOptions' = proto.Field( + proto.MESSAGE, + number=9, + oneof='type', + message='HybridOptions', + ) + timespan_config: TimespanConfig = proto.Field( + proto.MESSAGE, + number=6, + message=TimespanConfig, + ) + + +class HybridOptions(proto.Message): + r"""Configuration to control jobs where the content being + inspected is outside of Google 
Cloud Platform. + + Attributes: + description (str): + A short description of where the data is + coming from. Will be stored once in the job. 256 + max length. + required_finding_label_keys (MutableSequence[str]): + These are labels that each inspection request must include + within their 'finding_labels' map. Request may contain + others, but any missing one of these will be rejected. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + No more than 10 keys can be required. + labels (MutableMapping[str, str]): + To organize findings, these labels will be added to each + finding. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional + information to make findings meaningful such as + the columns that are primary keys. + """ + + description: str = proto.Field( + proto.STRING, + number=1, + ) + required_finding_label_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + table_options: 'TableOptions' = proto.Field( + proto.MESSAGE, + number=4, + message='TableOptions', + ) + + +class BigQueryKey(proto.Message): + r"""Row key for identifying a record in BigQuery table. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + row_number (int): + Row number inferred at the time the table was scanned. 
This + value is nondeterministic, cannot be queried, and may be + null for inspection jobs. To locate findings within a table, + specify + ``inspect_job.storage_config.big_query_options.identifying_fields`` + in ``CreateDlpJobRequest``. + """ + + table_reference: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + row_number: int = proto.Field( + proto.INT64, + number=2, + ) + + +class DatastoreKey(proto.Message): + r"""Record key for a finding in Cloud Datastore. + + Attributes: + entity_key (google.cloud.dlp_v2.types.Key): + Datastore entity key. + """ + + entity_key: 'Key' = proto.Field( + proto.MESSAGE, + number=1, + message='Key', + ) + + +class Key(proto.Message): + r"""A unique identifier for a Datastore entity. + If a key's partition ID or any of its path kinds or names are + reserved/read-only, the key is reserved/read-only. + A reserved/read-only key is forbidden in certain documented + contexts. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + Entities are partitioned into subsets, + currently identified by a project ID and + namespace ID. Queries are scoped to a single + partition. + path (MutableSequence[google.cloud.dlp_v2.types.Key.PathElement]): + The entity path. An entity path consists of one or more + elements composed of a kind and a string or numerical + identifier, which identify entities. The first element + identifies a *root entity*, the second element identifies a + *child* of the root entity, the third element identifies a + child of the second entity, and so forth. The entities + identified by all prefixes of the path are called the + element's *ancestors*. + + A path can never be empty, and a path can have at most 100 + elements. + """ + + class PathElement(proto.Message): + r"""A (kind, ID/name) pair used to construct a key path. + If either name or ID is set, the element is complete. If neither + is set, the element is incomplete. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kind (str): + The kind of the entity. A kind matching regex ``__.*__`` is + reserved/read-only. A kind must not contain more than 1500 + bytes when UTF-8 encoded. Cannot be ``""``. + id (int): + The auto-allocated ID of the entity. + Never equal to zero. Values less than zero are + discouraged and may not be supported in the + future. + + This field is a member of `oneof`_ ``id_type``. + name (str): + The name of the entity. A name matching regex ``__.*__`` is + reserved/read-only. A name must not be more than 1500 bytes + when UTF-8 encoded. Cannot be ``""``. + + This field is a member of `oneof`_ ``id_type``. + """ + + kind: str = proto.Field( + proto.STRING, + number=1, + ) + id: int = proto.Field( + proto.INT64, + number=2, + oneof='id_type', + ) + name: str = proto.Field( + proto.STRING, + number=3, + oneof='id_type', + ) + + partition_id: 'PartitionId' = proto.Field( + proto.MESSAGE, + number=1, + message='PartitionId', + ) + path: MutableSequence[PathElement] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=PathElement, + ) + + +class RecordKey(proto.Message): + r"""Message for a unique key indicating a record that contains a + finding. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_key (google.cloud.dlp_v2.types.DatastoreKey): + + This field is a member of `oneof`_ ``type``. 
+ big_query_key (google.cloud.dlp_v2.types.BigQueryKey): + + This field is a member of `oneof`_ ``type``. + id_values (MutableSequence[str]): + Values of identifying columns in the given row. Order of + values matches the order of ``identifying_fields`` specified + in the scanning request. + """ + + datastore_key: 'DatastoreKey' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='DatastoreKey', + ) + big_query_key: 'BigQueryKey' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='BigQueryKey', + ) + id_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class BigQueryTable(proto.Message): + r"""Message defining the location of a BigQuery table. A table is + uniquely identified by its project_id, dataset_id, and table_name. + Within a query a table is often referenced with a string in the + format of: ``:.`` or + ``..``. + + Attributes: + project_id (str): + The Google Cloud Platform project ID of the + project containing the table. If omitted, + project ID is inferred from the API call. + dataset_id (str): + Dataset ID of the table. + table_id (str): + Name of the table. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=2, + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BigQueryField(proto.Message): + r"""Message defining a field of a BigQuery table. + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Source table of the field. + field (google.cloud.dlp_v2.types.FieldId): + Designated field in the BigQuery table. + """ + + table: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=2, + message='FieldId', + ) + + +class EntityId(proto.Message): + r"""An entity in a dataset is a field or set of fields that correspond + to a single person. 
For example, in medical records the ``EntityId``
+    might be a patient identifier, or for financial records it might be
+    an account identifier. This message is used when generalizations or
+    analysis must take into account that multiple rows correspond to the
+    same entity.
+
+    Attributes:
+        field (google.cloud.dlp_v2.types.FieldId):
+            Composite key indicating which field contains
+            the entity identifier.
+    """
+
+    field: 'FieldId' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='FieldId',
+    )
+
+
+class TableOptions(proto.Message):
+    r"""Instructions regarding the table content being inspected.
+
+    Attributes:
+        identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]):
+            The columns that are the primary keys for
+            table objects included in ContentItem. A copy of
+            this cell's value will be stored alongside
+            each finding so that the finding can
+            be traced to the specific row it came from. No
+            more than 3 may be provided.
+    """
+
+    identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='FieldId',
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini
new file mode 100644
index 00000000..574c5aed
--- /dev/null
+++ b/owl-bot-staging/v2/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py
new file mode 100644
index 00000000..6b1462df
--- /dev/null
+++ b/owl-bot-staging/v2/noxfile.py
@@ -0,0 +1,184 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/dlp_v2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py new file mode 100644 index 00000000..e4371abf --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ActivateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ActivateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.activate_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ActivateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py new file mode 100644 index 00000000..c0b4fac1 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ActivateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ActivateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.activate_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ActivateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py new file mode 100644 index 00000000..d8190299 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CancelDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_CancelDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py new file mode 100644 index 00000000..7475d6fa --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CancelDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_CancelDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py new file mode 100644 index 00000000..81ad2519 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py new file mode 100644 index 00000000..b394f634 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py new file mode 100644 index 00000000..28770717 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py new file mode 100644 index 00000000..779754f6 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py new file mode 100644 index 00000000..aeb40676 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py new file mode 100644 index 00000000..0e344b36 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py new file mode 100644 index 00000000..3e82b8f3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = await client.create_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py new file mode 100644 index 00000000..ebb74284 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = client.create_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py new file mode 100644 index 00000000..cae6db89 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py new file mode 100644 index 00000000..d59a301d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py new file mode 100644 index 00000000..4903b032 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeidentifyContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = await client.deidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_DeidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py new file mode 100644 index 00000000..2422616c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeidentifyContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = client.deidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_DeidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py new file mode 100644 index 00000000..f544f12d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_deidentify_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py new file mode 100644 index 00000000..a33f3b26 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_deidentify_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py new file mode 100644 index 00000000..8737125b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py new file mode 100644 index 00000000..bb0ce9df --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + client.delete_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py new file mode 100644 index 00000000..f0aec8eb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_inspect_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py new file mode 100644 index 00000000..c908d867 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_inspect_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py new file mode 100644 index 00000000..3784ee3e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_trigger(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py new file mode 100644 index 00000000..9f4405da --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + client.delete_job_trigger(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py new file mode 100644 index 00000000..652d88ff --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_stored_info_type(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py new file mode 100644 index 00000000..7e37ce36 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_stored_info_type(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py new file mode 100644 index 00000000..869504da --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinishDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_FinishDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.finish_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_FinishDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py new file mode 100644 index 00000000..1b694f90 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinishDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_FinishDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + client.finish_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_FinishDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py new file mode 100644 index 00000000..fc1570d3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py new file mode 100644 index 00000000..bb1e1986 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py new file mode 100644 index 00000000..2065aa85 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py new file mode 100644 index 00000000..13959bde --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for GetDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py new file mode 100644 index 00000000..1a9c9649 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py new file mode 100644 index 00000000..112e3d83 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py new file mode 100644 index 00000000..248184c7 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py new file mode 100644 index 00000000..9c6cdb3a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py new file mode 100644 index 00000000..a7820fe2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py new file mode 100644 index 00000000..d0b0a44c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py new file mode 100644 index 00000000..e9f9be5a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py new file mode 100644 index 00000000..2bfd7fe1 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py new file mode 100644 index 00000000..dbdd91c2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py new file mode 100644 index 00000000..a9c4c85e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py new file mode 100644 index 00000000..3f24588b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InspectContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_InspectContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = await client.inspect_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_InspectContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py new file mode 100644 index 00000000..4b5a10f3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for InspectContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_InspectContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = client.inspect_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_InspectContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py new file mode 100644 index 00000000..d1a40dc0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeidentifyTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py new file mode 100644 index 00000000..6a01f0fb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeidentifyTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py new file mode 100644 index 00000000..57c790d8 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDlpJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDlpJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDlpJobs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py new file mode 100644 index 00000000..7d06c237 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDlpJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDlpJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDlpJobs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py new file mode 100644 index 00000000..16b871f8 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInfoTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = await client.list_info_types(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ListInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py new file mode 100644 index 00000000..9e3ca167 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for ListInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInfoTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = client.list_info_types(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ListInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py new file mode 100644 index 00000000..6e405a4f --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInspectTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInspectTemplates_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListInspectTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py new file mode 100644 index 00000000..71673677 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInspectTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInspectTemplates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListInspectTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py new file mode 100644 index 00000000..e8c0281f --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListJobTriggers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListJobTriggers_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py new file mode 100644 index 00000000..0f9141c0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListJobTriggers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListJobTriggers_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py new file mode 100644 index 00000000..460c99c4 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListStoredInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py new file mode 100644 index 00000000..1ad1796e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListStoredInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py new file mode 100644 index 00000000..a7a0d502 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RedactImage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_RedactImage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = await client.redact_image(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_RedactImage_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py new file mode 100644 index 00000000..272bdb80 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RedactImage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_RedactImage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = client.redact_image(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_RedactImage_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py new file mode 100644 index 00000000..401f62df --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ReidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ReidentifyContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.reidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ReidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py new file mode 100644 index 00000000..9e654be9 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ReidentifyContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = client.reidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ReidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py new file mode 100644 index 00000000..8b32186c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py new file mode 100644 index 00000000..e3296531 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py new file mode 100644 index 00000000..8e062116 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py new file mode 100644 index 00000000..332c5de6 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py new file mode 100644 index 00000000..58baaeeb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.update_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py new file mode 100644 index 00000000..3694b5ff --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.update_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py new file mode 100644 index 00000000..d5658d32 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.update_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py new file mode 100644 index 00000000..9471180b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.update_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json new file mode 100644 index 00000000..956f9eab --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -0,0 +1,5503 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.privacy.dlp.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-dlp", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.activate_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ActivateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "activate_job_trigger" + }, + "description": "Sample for ActivateJobTrigger", + "file": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.activate_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ActivateJobTrigger" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "activate_job_trigger" + }, + "description": "Sample for ActivateJobTrigger", + "file": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.cancel_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CancelDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_dlp_job" + }, + "description": "Sample for CancelDlpJob", + "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.cancel_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CancelDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_dlp_job" + }, + "description": "Sample for CancelDlpJob", + "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py" + 
}, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "create_deidentify_template" + }, + "description": "Sample for CreateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.create_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "create_deidentify_template" + }, + "description": "Sample for CreateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_job", + "type": "google.cloud.dlp_v2.types.InspectJobConfig" + }, + { + "name": "risk_job", + "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "create_dlp_job" + }, + "description": "Sample for CreateDlpJob", + "file": "dlp_v2_generated_dlp_service_create_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" + }, + { + "name": "parent", + "type": "str" + }, 
+ { + "name": "inspect_job", + "type": "google.cloud.dlp_v2.types.InspectJobConfig" + }, + { + "name": "risk_job", + "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "create_dlp_job" + }, + "description": "Sample for CreateDlpJob", + "file": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "create_inspect_template" + }, + "description": "Sample for CreateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_create_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "create_inspect_template" + }, + "description": "Sample for 
CreateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "create_job_trigger" + }, + "description": "Sample for CreateJobTrigger", + "file": "dlp_v2_generated_dlp_service_create_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_async", + "segments": [ + { + "end": 55, + "start": 27, + 
"type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "create_job_trigger" + }, + "description": "Sample for CreateJobTrigger", + "file": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 
53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "create_stored_info_type" + }, + "description": "Sample for CreateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", 
+ "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "create_stored_info_type" + }, + "description": "Sample for CreateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.deidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", + "shortName": "deidentify_content" + }, + "description": "Sample for DeidentifyContent", + "file": "dlp_v2_generated_dlp_service_deidentify_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_deidentify_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.deidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", + "shortName": "deidentify_content" + }, + "description": "Sample for DeidentifyContent", + "file": "dlp_v2_generated_dlp_service_deidentify_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_deidentify_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_deidentify_template" + }, + "description": "Sample for DeleteDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async", + 
"segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_deidentify_template" + }, + "description": "Sample for DeleteDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dlp_job" + }, + "description": "Sample for DeleteDlpJob", + "file": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": 
"DeleteDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dlp_job" + }, + "description": "Sample for DeleteDlpJob", + "file": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_inspect_template" + }, + "description": "Sample for 
DeleteInspectTemplate", + "file": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_inspect_template" + }, + "description": "Sample for DeleteInspectTemplate", + "file": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_trigger" + }, + "description": "Sample for DeleteJobTrigger", + "file": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.delete_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_trigger" + }, + "description": "Sample for DeleteJobTrigger", + "file": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_stored_info_type" + }, + "description": "Sample for DeleteStoredInfoType", + "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_stored_info_type" + }, + "description": "Sample for DeleteStoredInfoType", + "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dlp_v2_generated_DlpService_DeleteStoredInfoType_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.finish_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "FinishDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "finish_dlp_job" + }, + "description": "Sample for FinishDlpJob", + "file": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.finish_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "FinishDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "finish_dlp_job" + }, + "description": "Sample for FinishDlpJob", + "file": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "get_deidentify_template" + }, + "description": "Sample for GetDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "get_deidentify_template" + }, + "description": "Sample for GetDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "get_dlp_job" + }, + "description": "Sample for GetDlpJob", + "file": "dlp_v2_generated_dlp_service_get_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + 
{ + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "get_dlp_job" + }, + "description": "Sample for GetDlpJob", + "file": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "get_inspect_template" + }, + "description": "Sample for GetInspectTemplate", + "file": "dlp_v2_generated_dlp_service_get_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + 
}, + "shortName": "GetInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "get_inspect_template" + }, + "description": "Sample for GetInspectTemplate", + "file": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "get_job_trigger" + }, + "description": "Sample for GetJobTrigger", + "file": "dlp_v2_generated_dlp_service_get_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "get_job_trigger" + }, + "description": "Sample for GetJobTrigger", + "file": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "get_stored_info_type" + }, + "description": "Sample for GetStoredInfoType", + "file": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "get_stored_info_type" + }, + "description": "Sample for GetStoredInfoType", + "file": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_dlp_job", + "method": { + "fullName": 
"google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_dlp_job" + }, + "description": "Sample for HybridInspectDlpJob", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_dlp_job" + }, + "description": "Sample for HybridInspectDlpJob", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_job_trigger" + }, + 
"description": "Sample for HybridInspectJobTrigger", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_job_trigger" + }, + "description": "Sample for HybridInspectJobTrigger", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.inspect_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "InspectContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.InspectContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", + "shortName": "inspect_content" + }, + "description": "Sample for InspectContent", + "file": "dlp_v2_generated_dlp_service_inspect_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_InspectContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dlp_v2_generated_dlp_service_inspect_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.inspect_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "InspectContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.InspectContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", + "shortName": "inspect_content" + }, + "description": "Sample for InspectContent", + "file": "dlp_v2_generated_dlp_service_inspect_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_InspectContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_inspect_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_deidentify_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDeidentifyTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager", + "shortName": "list_deidentify_templates" + }, + "description": "Sample for ListDeidentifyTemplates", + "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_deidentify_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDeidentifyTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager", + "shortName": "list_deidentify_templates" + }, + "description": "Sample for ListDeidentifyTemplates", + "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_dlp_jobs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDlpJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager", + "shortName": "list_dlp_jobs" + }, + 
"description": "Sample for ListDlpJobs", + "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_dlp_jobs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDlpJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager", + "shortName": "list_dlp_jobs" + }, + "description": "Sample for ListDlpJobs", + "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", + "shortName": "list_info_types" + }, + "description": "Sample for ListInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_info_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_info_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", + "shortName": "list_info_types" + }, + "description": "Sample for ListInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_info_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_info_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_inspect_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": 
"ListInspectTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager", + "shortName": "list_inspect_templates" + }, + "description": "Sample for ListInspectTemplates", + "file": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_inspect_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInspectTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager", + "shortName": "list_inspect_templates" + }, + "description": "Sample for ListInspectTemplates", + "file": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_job_triggers", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListJobTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager", + "shortName": "list_job_triggers" + }, + "description": "Sample for ListJobTriggers", + "file": "dlp_v2_generated_dlp_service_list_job_triggers_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_job_triggers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_job_triggers", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListJobTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager", + "shortName": "list_job_triggers" + }, + "description": "Sample for ListJobTriggers", + "file": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_stored_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListStoredInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager", + "shortName": "list_stored_info_types" + }, + "description": "Sample for ListStoredInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_stored_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListStoredInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager", + "shortName": "list_stored_info_types" + }, + "description": "Sample for ListStoredInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.redact_image", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "RedactImage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.RedactImageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", + "shortName": "redact_image" + }, + "description": "Sample for RedactImage", + "file": "dlp_v2_generated_dlp_service_redact_image_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_RedactImage_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_redact_image_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.redact_image", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "RedactImage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.RedactImageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", + "shortName": 
"redact_image" + }, + "description": "Sample for RedactImage", + "file": "dlp_v2_generated_dlp_service_redact_image_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_RedactImage_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_redact_image_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.reidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ReidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", + "shortName": "reidentify_content" + }, + "description": "Sample for ReidentifyContent", + "file": "dlp_v2_generated_dlp_service_reidentify_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_reidentify_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.reidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ReidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", + "shortName": "reidentify_content" + }, + "description": "Sample for ReidentifyContent", + "file": "dlp_v2_generated_dlp_service_reidentify_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_reidentify_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "update_deidentify_template" + }, + "description": "Sample for UpdateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.update_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "update_deidentify_template" + }, + "description": "Sample for UpdateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_inspect_template", + "method": { + "fullName": 
"google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "update_inspect_template" + }, + "description": "Sample for UpdateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_update_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + 
"shortName": "UpdateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "update_inspect_template" + }, + "description": "Sample for UpdateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" + }, 
+ { + "name": "name", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "update_job_trigger" + }, + "description": "Sample for UpdateJobTrigger", + "file": "dlp_v2_generated_dlp_service_update_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "update_job_trigger" + }, + "description": "Sample for UpdateJobTrigger", + "file": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "update_stored_info_type" + }, + "description": "Sample for UpdateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "update_stored_info_type" + }, + "description": "Sample for UpdateStoredInfoType", + "file": 
"dlp_v2_generated_dlp_service_update_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py" + } + ] +} diff --git a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py new file mode 100644 index 00000000..9adcd0d5 --- /dev/null +++ b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py @@ -0,0 +1,209 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dlpCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'activate_job_trigger': ('name', ), + 'cancel_dlp_job': ('name', ), + 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), + 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), + 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), + 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), + 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), + 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), + 'delete_deidentify_template': ('name', ), + 'delete_dlp_job': ('name', ), + 'delete_inspect_template': ('name', ), + 'delete_job_trigger': ('name', ), + 'delete_stored_info_type': ('name', ), + 'finish_dlp_job': ('name', ), + 'get_deidentify_template': ('name', ), + 'get_dlp_job': ('name', ), + 'get_inspect_template': ('name', ), + 'get_job_trigger': ('name', ), + 'get_stored_info_type': ('name', ), + 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), + 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), + 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), + 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 
'location_id', ), + 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), + 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), + 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'type_', 'location_id', ), + 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), + 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), + 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), + 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), + 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), + 'update_stored_info_type': ('name', 'config', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dlpCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dlp client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py new file mode 100644 index 00000000..2b4eb21b --- /dev/null +++ b/owl-bot-staging/v2/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-dlp' + + +description = "Google Cloud Dlp API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/dlp/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-dlp" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: 
Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v2/testing/constraints-3.10.txt b/owl-bot-staging/v2/testing/constraints-3.10.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.11.txt b/owl-bot-staging/v2/testing/constraints-3.11.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.12.txt b/owl-bot-staging/v2/testing/constraints-3.12.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt new file mode 100644 index 00000000..6c44adfe --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py new file mode 100644 index 00000000..64618efd --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -0,0 +1,17404 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient +from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.services.dlp_service import transports +from google.cloud.dlp_v2.types import dlp +from google.cloud.dlp_v2.types import storage +from google.cloud.location import locations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import 
duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DlpServiceClient._get_default_mtls_endpoint(None) is None + assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DlpServiceClient, "grpc"), + (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), +]) +def test_dlp_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with 
mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dlp.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dlp.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DlpServiceGrpcTransport, "grpc"), + (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DlpServiceRestTransport, "rest"), +]) +def test_dlp_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DlpServiceClient, "grpc"), + (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), +]) +def test_dlp_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dlp.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dlp.googleapis.com' + ) + + +def test_dlp_service_client_get_transport_class(): + transport = DlpServiceClient.get_transport_class() + available_transports = [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceRestTransport, + ] + assert transport in available_transports + + transport = DlpServiceClient.get_transport_class("grpc") + assert transport == transports.DlpServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), +]) +@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) +def test_dlp_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), + 
(DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DlpServiceClient, DlpServiceAsyncClient +]) +@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) +def test_dlp_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), +]) +def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), +]) +def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_dlp_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DlpServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_dlp_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.InspectContentRequest, + dict, +]) +def test_inspect_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectContentResponse( + ) + response = client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +def test_inspect_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + client.inspect_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + +@pytest.mark.asyncio +async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( + )) + response = await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.asyncio +async def test_inspect_content_async_from_dict(): + await test_inspect_content_async(request_type=dict) + + +def test_inspect_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = dlp.InspectContentResponse() + client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_inspect_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) + await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.RedactImageRequest, + dict, +]) +def test_redact_image(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + ) + response = client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +def test_redact_image_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + client.redact_image() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + +@pytest.mark.asyncio +async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + )) + response = await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +@pytest.mark.asyncio +async def test_redact_image_async_from_dict(): + await test_redact_image_async(request_type=dict) + + +def test_redact_image_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = dlp.RedactImageResponse() + client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_redact_image_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) + await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.DeidentifyContentRequest, + dict, +]) +def test_deidentify_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyContentResponse( + ) + response = client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +def test_deidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + client.deidentify_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + +@pytest.mark.asyncio +async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( + )) + response = await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_deidentify_content_async_from_dict(): + await test_deidentify_content_async(request_type=dict) + + +def test_deidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = dlp.DeidentifyContentResponse() + client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_deidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) + await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.ReidentifyContentRequest, + dict, +]) +def test_reidentify_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ReidentifyContentResponse( + ) + response = client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + client.reidentify_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + +@pytest.mark.asyncio +async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( + )) + response = await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_reidentify_content_async_from_dict(): + await test_reidentify_content_async(request_type=dict) + + +def test_reidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = dlp.ReidentifyContentResponse() + client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_reidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) + await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInfoTypesRequest, + dict, +]) +def test_list_info_types(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse( + ) + response = client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +def test_list_info_types_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + client.list_info_types() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + +@pytest.mark.asyncio +async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( + )) + response = await client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.asyncio +async def test_list_info_types_async_from_dict(): + await test_list_info_types_async(request_type=dict) + + +def test_list_info_types_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_info_types_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_info_types_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateInspectTemplateRequest, + dict, +]) +def test_create_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + client.create_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_inspect_template_async_from_dict(): + await test_create_inspect_template_async(request_type=dict) + + +def test_create_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + + +def test_create_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateInspectTemplateRequest, + dict, +]) +def test_update_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + client.update_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_inspect_template_async_from_dict(): + await test_update_inspect_template_async(request_type=dict) + + +def test_update_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_inspect_template( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_inspect_template( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetInspectTemplateRequest, + dict, +]) +def test_get_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + client.get_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_inspect_template_async_from_dict(): + await test_get_inspect_template_async(request_type=dict) + + +def test_get_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.GetInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInspectTemplatesRequest, + dict, +]) +def test_list_inspect_templates(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_inspect_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + client.list_inspect_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + +@pytest.mark.asyncio +async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_from_dict(): + await test_list_inspect_templates_async(request_type=dict) + + +def test_list_inspect_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value = dlp.ListInspectTemplatesResponse() + client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_inspect_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) + await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_inspect_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_inspect_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_inspect_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_inspect_templates_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.ListInspectTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_inspect_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_inspect_templates_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_inspect_templates_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Set the response to a series of pages. 
+        call.side_effect = (
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[],
+                next_page_token='def',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_inspect_templates(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, dlp.InspectTemplate)
+                   for i in results)
+def test_list_inspect_templates_pages(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_inspect_templates),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[],
+                next_page_token='def',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_inspect_templates(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_async_pager():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_inspect_templates),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[],
+                next_page_token='def',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_inspect_templates(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dlp.InspectTemplate)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_async_pages():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_inspect_templates),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_inspect_templates(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteInspectTemplateRequest, + dict, +]) +def test_delete_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + client.delete_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_inspect_template_async_from_dict(): + await test_delete_inspect_template_async(request_type=dict) + + +def test_delete_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = None + client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDeidentifyTemplateRequest, + dict, +]) +def test_create_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + client.create_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_deidentify_template_async_from_dict(): + await test_create_deidentify_template_async(request_type=dict) + + +def test_create_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_deidentify_template( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + + +def test_create_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_deidentify_template( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDeidentifyTemplateRequest, + dict, +]) +def test_update_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + client.update_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_deidentify_template_async_from_dict(): + await test_update_deidentify_template_async(request_type=dict) + + +def test_update_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_deidentify_template( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_deidentify_template( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDeidentifyTemplateRequest, + dict, +]) +def test_get_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + client.get_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_deidentify_template_async_from_dict(): + await test_get_deidentify_template_async(request_type=dict) + + +def test_get_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDeidentifyTemplatesRequest, + dict, +]) +def test_list_deidentify_templates(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_deidentify_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + client.list_deidentify_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_from_dict(): + await test_list_deidentify_templates_async(request_type=dict) + + +def test_list_deidentify_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = dlp.ListDeidentifyTemplatesResponse() + client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) + await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_deidentify_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deidentify_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_deidentify_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_deidentify_templates_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_deidentify_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_deidentify_templates_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_deidentify_templates_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_deidentify_templates(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in results) +def test_list_deidentify_templates_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_deidentify_templates(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_deidentify_templates(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_deidentify_templates(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDeidentifyTemplateRequest, + dict, +]) +def test_delete_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + client.delete_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async_from_dict(): + await test_delete_deidentify_template_async(request_type=dict) + + +def test_delete_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = None + client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateJobTriggerRequest, + dict, +]) +def test_create_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + client.create_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + +@pytest.mark.asyncio +async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + response = await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_create_job_trigger_async_from_dict(): + await test_create_job_trigger_async(request_type=dict) + + +def test_create_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + + +def test_create_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateJobTriggerRequest, + dict, +]) +def test_update_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + client.update_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + +@pytest.mark.asyncio +async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + response = await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_update_job_trigger_async_from_dict(): + await test_update_job_trigger_async(request_type=dict) + + +def test_update_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectJobTriggerRequest, + dict, +]) +def test_hybrid_inspect_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse( + ) + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + client.hybrid_inspect_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( + )) + response = await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async_from_dict(): + await test_hybrid_inspect_job_trigger_async(request_type=dict) + + +def test_hybrid_inspect_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_hybrid_inspect_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_hybrid_inspect_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetJobTriggerRequest, + dict, +]) +def test_get_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + client.get_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + +@pytest.mark.asyncio +async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + response = await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_get_job_trigger_async_from_dict(): + await test_get_job_trigger_async(request_type=dict) + + +def test_get_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListJobTriggersRequest, + dict, +]) +def test_list_job_triggers(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + ) + response = client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_job_triggers_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + client.list_job_triggers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + +@pytest.mark.asyncio +async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_from_dict(): + await test_list_job_triggers_async(request_type=dict) + + +def test_list_job_triggers_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = dlp.ListJobTriggersResponse() + client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_job_triggers_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_job_triggers_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.ListJobTriggersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_job_triggers_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_job_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + + +def test_list_job_triggers_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_job_triggers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in results) +def test_list_job_triggers_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_triggers(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_triggers(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_job_triggers(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteJobTriggerRequest, + dict, +]) +def test_delete_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + client.delete_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + +@pytest.mark.asyncio +async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_trigger_async_from_dict(): + await test_delete_job_trigger_async(request_type=dict) + + +def test_delete_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = None + client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ActivateJobTriggerRequest, + dict, +]) +def test_activate_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_activate_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + client.activate_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + +@pytest.mark.asyncio +async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_activate_job_trigger_async_from_dict(): + await test_activate_job_trigger_async(request_type=dict) + + +def test_activate_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_activate_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDlpJobRequest, + dict, +]) +def test_create_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_create_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + client.create_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + +@pytest.mark.asyncio +async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_create_dlp_job_async_from_dict(): + await test_create_dlp_job_async(request_type=dict) + + +def test_create_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_dlp_job( + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) + + +def test_create_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_dlp_job( + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDlpJobsRequest, + dict, +]) +def test_list_dlp_jobs(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_dlp_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + client.list_dlp_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDlpJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_from_dict(): + await test_list_dlp_jobs_async(request_type=dict) + + +def test_list_dlp_jobs_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value = dlp.ListDlpJobsResponse() + client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) + await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_dlp_jobs_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_dlp_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_dlp_jobs_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_dlp_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + + +def test_list_dlp_jobs_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_dlp_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in results) +def test_list_dlp_jobs_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_dlp_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_dlp_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_dlp_jobs(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.GetDlpJobRequest, + dict, +]) +def test_get_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_get_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + client.get_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + +@pytest.mark.asyncio +async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_get_dlp_job_async_from_dict(): + await test_get_dlp_job_async(request_type=dict) + + +def test_get_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDlpJobRequest, + dict, +]) +def test_delete_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + client.delete_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + +@pytest.mark.asyncio +async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dlp_job_async_from_dict(): + await test_delete_dlp_job_async(request_type=dict) + + +def test_delete_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value = None + client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CancelDlpJobRequest, + dict, +]) +def test_cancel_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + client.cancel_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async_from_dict(): + await test_cancel_dlp_job_async(request_type=dict) + + +def test_cancel_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = None + client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateStoredInfoTypeRequest, + dict, +]) +def test_create_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_create_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + client.create_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_stored_info_type_async_from_dict(): + await test_create_stored_info_type_async(request_type=dict) + + +def test_create_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_stored_info_type( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + + +def test_create_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_stored_info_type( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateStoredInfoTypeRequest, + dict, +]) +def test_update_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_update_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + client.update_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_update_stored_info_type_async_from_dict(): + await test_update_stored_info_type_async(request_type=dict) + + +def test_update_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_stored_info_type( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_stored_info_type( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetStoredInfoTypeRequest, + dict, +]) +def test_get_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_get_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + client.get_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_stored_info_type_async_from_dict(): + await test_get_stored_info_type_async(request_type=dict) + + +def test_get_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListStoredInfoTypesRequest, + dict, +]) +def test_list_stored_info_types(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_stored_info_types_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + client.list_stored_info_types() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + +@pytest.mark.asyncio +async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_from_dict(): + await test_list_stored_info_types_async(request_type=dict) + + +def test_list_stored_info_types_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value = dlp.ListStoredInfoTypesResponse() + client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_stored_info_types_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) + await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_stored_info_types_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_stored_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_stored_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_stored_info_types_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_stored_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_stored_info_types_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent='parent_value', + ) + + +def test_list_stored_info_types_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_stored_info_types(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in results) +def test_list_stored_info_types_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + pages = list(client.list_stored_info_types(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_stored_info_types(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_stored_info_types(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteStoredInfoTypeRequest, + dict, +]) +def test_delete_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + client.delete_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async_from_dict(): + await test_delete_stored_info_type_async(request_type=dict) + + +def test_delete_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = None + client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectDlpJobRequest, + dict, +]) +def test_hybrid_inspect_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse( + ) + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + client.hybrid_inspect_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( + )) + response = await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async_from_dict(): + await test_hybrid_inspect_dlp_job_async(request_type=dict) + + +def test_hybrid_inspect_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.HybridInspectDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_hybrid_inspect_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_hybrid_inspect_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.FinishDlpJobRequest, + dict, +]) +def test_finish_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_finish_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + client.finish_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + +@pytest.mark.asyncio +async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_finish_dlp_job_async_from_dict(): + await test_finish_dlp_job_async(request_type=dict) + + +def test_finish_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = None + client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_finish_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.InspectContentRequest, + dict, +]) +def test_inspect_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.inspect_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_inspect_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_inspect_content") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectContentResponse.to_json(dlp.InspectContentResponse()) + + request = dlp.InspectContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectContentResponse() + + client.inspect_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + 
pre.assert_called_once() + post.assert_called_once() + + +def test_inspect_content_rest_bad_request(transport: str = 'rest', request_type=dlp.InspectContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.inspect_content(request) + + +def test_inspect_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.RedactImageRequest, + dict, +]) +def test_redact_image_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.RedactImageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.redact_image(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_redact_image_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_redact_image") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.RedactImageResponse.to_json(dlp.RedactImageResponse()) + + request = dlp.RedactImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value 
= request, metadata + post.return_value = dlp.RedactImageResponse() + + client.redact_image(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_redact_image_rest_bad_request(transport: str = 'rest', request_type=dlp.RedactImageRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.redact_image(request) + + +def test_redact_image_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeidentifyContentRequest, + dict, +]) +def test_deidentify_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.deidentify_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_deidentify_content") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyContentResponse.to_json(dlp.DeidentifyContentResponse()) + + request = dlp.DeidentifyContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyContentResponse() + + client.deidentify_content(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_deidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.DeidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deidentify_content(request) + + +def test_deidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ReidentifyContentRequest, + dict, +]) +def test_reidentify_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ReidentifyContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.reidentify_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_rest_required_fields(request_type=dlp.ReidentifyContentRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ReidentifyContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.reidentify_content(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_reidentify_content_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.reidentify_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_reidentify_content") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ReidentifyContentResponse.to_json(dlp.ReidentifyContentResponse()) + + request = dlp.ReidentifyContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ReidentifyContentResponse() + + client.reidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.ReidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reidentify_content(request) + + +def test_reidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInfoTypesRequest, + dict, +]) +def test_list_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_info_types(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_info_types") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListInfoTypesResponse.to_json(dlp.ListInfoTypesResponse()) + + request = dlp.ListInfoTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInfoTypesResponse() + + client.list_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInfoTypesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_info_types(request) + + +def test_list_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/infoTypes" % client.transport._host, args[1]) + + +def test_list_info_types_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +def test_list_info_types_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateInspectTemplateRequest, + dict, +]) +def test_create_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_inspect_template_rest_required_fields(request_type=dlp.CreateInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "inspectTemplate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_inspect_template") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = dlp.CreateInspectTemplateRequest.pb(dlp.CreateInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.CreateInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.create_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_inspect_template(request) + + +def test_create_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) + + +def test_create_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +def test_create_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateInspectTemplateRequest, + dict, +]) +def test_update_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_inspect_template_rest_required_fields(request_type=dlp.UpdateInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_inspect_template") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = dlp.UpdateInspectTemplateRequest.pb(dlp.UpdateInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.UpdateInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.update_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_inspect_template(request) + + +def test_update_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_update_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetInspectTemplateRequest, + dict, +]) +def test_get_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_inspect_template_rest_required_fields(request_type=dlp.GetInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_inspect_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.GetInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.get_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_inspect_template(request) + + +def test_get_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_get_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + + +def test_get_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInspectTemplatesRequest, + dict, +]) +def test_list_inspect_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_inspect_templates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_inspect_templates_rest_required_fields(request_type=dlp.ListInspectTemplatesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_inspect_templates(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_inspect_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_inspect_templates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_inspect_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_inspect_templates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListInspectTemplatesRequest.pb(dlp.ListInspectTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListInspectTemplatesResponse.to_json(dlp.ListInspectTemplatesResponse()) + + request = dlp.ListInspectTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInspectTemplatesResponse() + + client.list_inspect_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_inspect_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInspectTemplatesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_inspect_templates(request) + + +def test_list_inspect_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_inspect_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) + + +def test_list_inspect_templates_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_inspect_templates_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_inspect_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.InspectTemplate) + for i in results) + + pages = list(client.list_inspect_templates(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + 
+ +@pytest.mark.parametrize("request_type", [ + dlp.DeleteInspectTemplateRequest, + dict, +]) +def test_delete_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_inspect_template_rest_required_fields(request_type=dlp.DeleteInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_inspect_template") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteInspectTemplateRequest.pb(dlp.DeleteInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + + client.delete_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_inspect_template(request) + + +def test_delete_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_delete_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + + +def test_delete_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDeidentifyTemplateRequest, + dict, +]) +def test_create_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_deidentify_template_rest_required_fields(request_type=dlp.CreateDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default 
values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "deidentifyTemplate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_create_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateDeidentifyTemplateRequest.pb(dlp.CreateDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + + request = dlp.CreateDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.create_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deidentify_template(request) + + +def test_create_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) + + +def test_create_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + +def test_create_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDeidentifyTemplateRequest, + dict, +]) +def test_update_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_deidentify_template_rest_required_fields(request_type=dlp.UpdateDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), 
+ interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.UpdateDeidentifyTemplateRequest.pb(dlp.UpdateDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + + request = dlp.UpdateDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.update_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deidentify_template(request) + + +def test_update_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_update_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDeidentifyTemplateRequest, + dict, +]) +def test_get_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_deidentify_template_rest_required_fields(request_type=dlp.GetDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetDeidentifyTemplateRequest.pb(dlp.GetDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + + request = dlp.GetDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.get_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deidentify_template(request) + + +def test_get_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_get_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + + +def test_get_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDeidentifyTemplatesRequest, + dict, +]) +def test_list_deidentify_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_deidentify_templates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_deidentify_templates_rest_required_fields(request_type=dlp.ListDeidentifyTemplatesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_deidentify_templates(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_deidentify_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_deidentify_templates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deidentify_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListDeidentifyTemplatesRequest.pb(dlp.ListDeidentifyTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListDeidentifyTemplatesResponse.to_json(dlp.ListDeidentifyTemplatesResponse()) + + request = dlp.ListDeidentifyTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDeidentifyTemplatesResponse() + + client.list_deidentify_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_deidentify_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDeidentifyTemplatesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deidentify_templates(request) + + +def test_list_deidentify_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_deidentify_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) + + +def test_list_deidentify_templates_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_deidentify_templates_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_deidentify_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in results) + + pages = list(client.list_deidentify_templates(request=sample_request).pages) + for page_, token in zip(pages, 
['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDeidentifyTemplateRequest, + dict, +]) +def test_delete_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_deidentify_template(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_deidentify_template_rest_required_fields(request_type=dlp.DeleteDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDeidentifyTemplateRequest.pb(dlp.DeleteDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + + client.delete_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deidentify_template(request) + + +def test_delete_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_delete_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +def test_delete_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateJobTriggerRequest, + dict, +]) +def test_create_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_rest_required_fields(request_type=dlp.CreateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "jobTrigger", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.CreateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.create_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job_trigger(request) + + +def test_create_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) + + +def test_create_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +def test_create_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateJobTriggerRequest, + dict, +]) +def test_update_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_rest_required_fields(request_type=dlp.UpdateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.UpdateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.update_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job_trigger(request) + + +def test_update_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_update_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectJobTriggerRequest, + dict, +]) +def test_hybrid_inspect_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_rest_required_fields(request_type=dlp.HybridInspectJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.hybrid_inspect_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectJobTriggerRequest.pb(dlp.HybridInspectJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) + + request = dlp.HybridInspectJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_job_trigger(request) + + +def test_hybrid_inspect_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.hybrid_inspect_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" % client.transport._host, args[1]) + + +def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +def test_hybrid_inspect_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetJobTriggerRequest, + dict, +]) +def test_get_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.GetJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.get_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.GetJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_trigger(request) + + +def test_get_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_get_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + + +def test_get_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListJobTriggersRequest, + dict, +]) +def test_list_job_triggers_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_job_triggers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_job_triggers_rest_required_fields(request_type=dlp.ListJobTriggersRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_job_triggers(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_job_triggers_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_job_triggers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_job_triggers_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_job_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListJobTriggersResponse.to_json(dlp.ListJobTriggersResponse()) + + request = dlp.ListJobTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListJobTriggersResponse() + + client.list_job_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_job_triggers_rest_bad_request(transport: str = 'rest', request_type=dlp.ListJobTriggersRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_job_triggers(request) + + +def test_list_job_triggers_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListJobTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_job_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) + + +def test_list_job_triggers_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + + +def test_list_job_triggers_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_job_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in results) + + pages = list(client.list_job_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteJobTriggerRequest, + dict, +]) +def test_delete_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_job_trigger(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_trigger_rest_required_fields(request_type=dlp.DeleteJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_job_trigger") as pre: + pre.assert_not_called() + pb_message = 
dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_job_trigger(request) + + +def test_delete_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_delete_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + + +def test_delete_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ActivateJobTriggerRequest, + dict, +]) +def test_activate_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.activate_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_activate_job_trigger_rest_required_fields(request_type=dlp.ActivateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.activate_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_activate_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_activate_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_activate_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.ActivateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.activate_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_activate_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.ActivateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.activate_job_trigger(request) + + +def test_activate_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDlpJobRequest, + dict, +]) +def test_create_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.CreateDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.create_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dlp_job(request) + + +def test_create_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) + + +def test_create_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + +def test_create_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDlpJobsRequest, + dict, +]) +def test_list_dlp_jobs_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_dlp_jobs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_dlp_jobs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_dlp_jobs_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_dlp_jobs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dlp_jobs_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListDlpJobsResponse.to_json(dlp.ListDlpJobsResponse()) + + request = dlp.ListDlpJobsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDlpJobsResponse() + + client.list_dlp_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_dlp_jobs_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDlpJobsRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dlp_jobs(request) + + +def test_list_dlp_jobs_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListDlpJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_dlp_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) + + +def test_list_dlp_jobs_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + + +def test_list_dlp_jobs_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_dlp_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in results) + + pages = list(client.list_dlp_jobs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDlpJobRequest, + dict, +]) +def test_get_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dlp_job_rest_interceptors(null_interceptor): + 
transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.GetDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.get_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dlp_job(request) + + +def test_get_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) + + +def test_get_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + + +def test_get_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDlpJobRequest, + dict, +]) +def test_delete_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dlp_job(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dlp_job(request) + + +def test_delete_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) + + +def test_delete_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + + +def test_delete_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CancelDlpJobRequest, + dict, +]) +def test_cancel_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.cancel_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_cancel_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.cancel_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.CancelDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.cancel_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_cancel_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CancelDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_dlp_job(request) + + +def test_cancel_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateStoredInfoTypeRequest, + dict, +]) +def test_create_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_create_stored_info_type_rest_required_fields(request_type=dlp.CreateStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "config", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_stored_info_type") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateStoredInfoTypeRequest.pb(dlp.CreateStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.CreateStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.create_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_stored_info_type(request) + + +def test_create_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) + + +def test_create_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + +def test_create_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateStoredInfoTypeRequest, + dict, +]) +def test_update_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_update_stored_info_type_rest_required_fields(request_type=dlp.UpdateStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_stored_info_type") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.UpdateStoredInfoTypeRequest.pb(dlp.UpdateStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.UpdateStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.update_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_stored_info_type(request) + + +def test_update_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_update_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetStoredInfoTypeRequest, + dict, +]) +def test_get_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_get_stored_info_type_rest_required_fields(request_type=dlp.GetStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_stored_info_type") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.GetStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.get_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.GetStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_stored_info_type(request) + + +def test_get_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_get_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + + +def test_get_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListStoredInfoTypesRequest, + dict, +]) +def test_list_stored_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_stored_info_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_stored_info_types_rest_required_fields(request_type=dlp.ListStoredInfoTypesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_stored_info_types(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_stored_info_types_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_stored_info_types._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_stored_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_stored_info_types") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListStoredInfoTypesResponse.to_json(dlp.ListStoredInfoTypesResponse()) + + request = dlp.ListStoredInfoTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListStoredInfoTypesResponse() + + client.list_stored_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_stored_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListStoredInfoTypesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_stored_info_types(request) + + +def test_list_stored_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListStoredInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_stored_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) + + +def test_list_stored_info_types_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent='parent_value', + ) + + +def test_list_stored_info_types_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_stored_info_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in results) + + pages = list(client.list_stored_info_types(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteStoredInfoTypeRequest, + dict, +]) +def test_delete_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_stored_info_type_rest_required_fields(request_type=dlp.DeleteStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type") as pre: + pre.assert_not_called() + pb_message = 
dlp.DeleteStoredInfoTypeRequest.pb(dlp.DeleteStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_stored_info_type(request) + + +def test_delete_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_delete_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + + +def test_delete_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectDlpJobRequest, + dict, +]) +def test_hybrid_inspect_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_rest_required_fields(request_type=dlp.HybridInspectDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + 
+ # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.hybrid_inspect_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = 
DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) + + request = dlp.HybridInspectDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_dlp_job(request) + + +def test_hybrid_inspect_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.hybrid_inspect_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" % client.transport._host, args[1]) + + +def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + + +def test_hybrid_inspect_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.FinishDlpJobRequest, + dict, +]) +def test_finish_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.finish_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.finish_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_finish_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_finish_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_finish_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.FinishDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.finish_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_finish_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.FinishDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.finish_dlp_job(request) + + +def test_finish_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DlpServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DlpServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = DlpServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DlpServiceGrpcTransport, + ) + +def test_dlp_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_dlp_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'inspect_content', + 'redact_image', + 'deidentify_content', + 'reidentify_content', + 'list_info_types', + 'create_inspect_template', + 'update_inspect_template', + 'get_inspect_template', + 'list_inspect_templates', + 'delete_inspect_template', + 'create_deidentify_template', + 'update_deidentify_template', + 'get_deidentify_template', + 'list_deidentify_templates', + 'delete_deidentify_template', + 'create_job_trigger', + 'update_job_trigger', + 'hybrid_inspect_job_trigger', + 'get_job_trigger', + 'list_job_triggers', + 'delete_job_trigger', + 'activate_job_trigger', + 'create_dlp_job', + 'list_dlp_jobs', + 'get_dlp_job', + 'delete_dlp_job', + 'cancel_dlp_job', + 'create_stored_info_type', + 'update_stored_info_type', + 'get_stored_info_type', + 'list_stored_info_types', + 'delete_stored_info_type', + 'hybrid_inspect_dlp_job', + 'finish_dlp_job', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_dlp_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + 
+def test_dlp_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport() + adc.assert_called_once() + + +def test_dlp_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DlpServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + ], +) +def test_dlp_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, + ], +) +def test_dlp_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DlpServiceGrpcTransport, grpc_helpers), + (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_dlp_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.DlpServiceRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_dlp_service_host_no_port(transport_name): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dlp.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dlp.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_dlp_service_host_with_port(transport_name): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dlp.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dlp.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_dlp_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DlpServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DlpServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.inspect_content._session + session2 = client2.transport.inspect_content._session + assert session1 != session2 + session1 = client1.transport.redact_image._session + session2 = client2.transport.redact_image._session + assert session1 != session2 + session1 = client1.transport.deidentify_content._session + session2 = client2.transport.deidentify_content._session + assert session1 != session2 + session1 = client1.transport.reidentify_content._session + session2 = client2.transport.reidentify_content._session + assert session1 != session2 + session1 = client1.transport.list_info_types._session + session2 = client2.transport.list_info_types._session + assert session1 != session2 + session1 = client1.transport.create_inspect_template._session + session2 = client2.transport.create_inspect_template._session + assert session1 != session2 + session1 = client1.transport.update_inspect_template._session + session2 = client2.transport.update_inspect_template._session + assert session1 != session2 + session1 = client1.transport.get_inspect_template._session + session2 = client2.transport.get_inspect_template._session + assert session1 != session2 + session1 = client1.transport.list_inspect_templates._session + session2 = client2.transport.list_inspect_templates._session + assert session1 != session2 + session1 = client1.transport.delete_inspect_template._session + session2 = client2.transport.delete_inspect_template._session + assert session1 != session2 + session1 = client1.transport.create_deidentify_template._session + session2 = client2.transport.create_deidentify_template._session + assert session1 != session2 + 
session1 = client1.transport.update_deidentify_template._session + session2 = client2.transport.update_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.get_deidentify_template._session + session2 = client2.transport.get_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.list_deidentify_templates._session + session2 = client2.transport.list_deidentify_templates._session + assert session1 != session2 + session1 = client1.transport.delete_deidentify_template._session + session2 = client2.transport.delete_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.create_job_trigger._session + session2 = client2.transport.create_job_trigger._session + assert session1 != session2 + session1 = client1.transport.update_job_trigger._session + session2 = client2.transport.update_job_trigger._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_job_trigger._session + session2 = client2.transport.hybrid_inspect_job_trigger._session + assert session1 != session2 + session1 = client1.transport.get_job_trigger._session + session2 = client2.transport.get_job_trigger._session + assert session1 != session2 + session1 = client1.transport.list_job_triggers._session + session2 = client2.transport.list_job_triggers._session + assert session1 != session2 + session1 = client1.transport.delete_job_trigger._session + session2 = client2.transport.delete_job_trigger._session + assert session1 != session2 + session1 = client1.transport.activate_job_trigger._session + session2 = client2.transport.activate_job_trigger._session + assert session1 != session2 + session1 = client1.transport.create_dlp_job._session + session2 = client2.transport.create_dlp_job._session + assert session1 != session2 + session1 = client1.transport.list_dlp_jobs._session + session2 = client2.transport.list_dlp_jobs._session + assert session1 != session2 + session1 = 
client1.transport.get_dlp_job._session + session2 = client2.transport.get_dlp_job._session + assert session1 != session2 + session1 = client1.transport.delete_dlp_job._session + session2 = client2.transport.delete_dlp_job._session + assert session1 != session2 + session1 = client1.transport.cancel_dlp_job._session + session2 = client2.transport.cancel_dlp_job._session + assert session1 != session2 + session1 = client1.transport.create_stored_info_type._session + session2 = client2.transport.create_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.update_stored_info_type._session + session2 = client2.transport.update_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.get_stored_info_type._session + session2 = client2.transport.get_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.list_stored_info_types._session + session2 = client2.transport.list_stored_info_types._session + assert session1 != session2 + session1 = client1.transport.delete_stored_info_type._session + session2 = client2.transport.delete_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_dlp_job._session + session2 = client2.transport.hybrid_inspect_dlp_job._session + assert session1 != session2 + session1 = client1.transport.finish_dlp_job._session + session2 = client2.transport.finish_dlp_job._session + assert session1 != session2 +def test_dlp_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_dlp_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DlpServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + 
credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_deidentify_template_path(): + organization = "squid" + deidentify_template = "clam" + expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) + actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) + 
assert expected == actual + + +def test_parse_deidentify_template_path(): + expected = { + "organization": "whelk", + "deidentify_template": "octopus", + } + path = DlpServiceClient.deidentify_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_deidentify_template_path(path) + assert expected == actual + +def test_dlp_content_path(): + project = "oyster" + expected = "projects/{project}/dlpContent".format(project=project, ) + actual = DlpServiceClient.dlp_content_path(project) + assert expected == actual + + +def test_parse_dlp_content_path(): + expected = { + "project": "nudibranch", + } + path = DlpServiceClient.dlp_content_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_content_path(path) + assert expected == actual + +def test_dlp_job_path(): + project = "cuttlefish" + dlp_job = "mussel" + expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) + actual = DlpServiceClient.dlp_job_path(project, dlp_job) + assert expected == actual + + +def test_parse_dlp_job_path(): + expected = { + "project": "winkle", + "dlp_job": "nautilus", + } + path = DlpServiceClient.dlp_job_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_job_path(path) + assert expected == actual + +def test_finding_path(): + project = "scallop" + location = "abalone" + finding = "squid" + expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) + actual = DlpServiceClient.finding_path(project, location, finding) + assert expected == actual + + +def test_parse_finding_path(): + expected = { + "project": "clam", + "location": "whelk", + "finding": "octopus", + } + path = DlpServiceClient.finding_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_finding_path(path) + assert expected == actual + +def test_inspect_template_path(): + organization = "oyster" + inspect_template = "nudibranch" + expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) + actual = DlpServiceClient.inspect_template_path(organization, inspect_template) + assert expected == actual + + +def test_parse_inspect_template_path(): + expected = { + "organization": "cuttlefish", + "inspect_template": "mussel", + } + path = DlpServiceClient.inspect_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_inspect_template_path(path) + assert expected == actual + +def test_job_trigger_path(): + project = "winkle" + job_trigger = "nautilus" + expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) + actual = DlpServiceClient.job_trigger_path(project, job_trigger) + assert expected == actual + + +def test_parse_job_trigger_path(): + expected = { + "project": "scallop", + "job_trigger": "abalone", + } + path = DlpServiceClient.job_trigger_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_job_trigger_path(path) + assert expected == actual + +def test_stored_info_type_path(): + organization = "squid" + stored_info_type = "clam" + expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) + actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) + assert expected == actual + + +def test_parse_stored_info_type_path(): + expected = { + "organization": "whelk", + "stored_info_type": "octopus", + } + path = DlpServiceClient.stored_info_type_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_stored_info_type_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DlpServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DlpServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = DlpServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DlpServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DlpServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DlpServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = DlpServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DlpServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DlpServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DlpServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DlpServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 9000f81a9c85c0a3821944224c0d9d3c41dbea25 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 16 Feb 2023 21:18:04 +0000 Subject: [PATCH 4/7] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../services/dlp_service/async_client.py | 1 + .../dlp_v2/services/dlp_service/client.py | 1 + .../services/dlp_service/transports/base.py | 1 + .../services/dlp_service/transports/grpc.py | 1 + .../dlp_service/transports/grpc_asyncio.py | 1 + .../services/dlp_service/transports/rest.py | 1 + owl-bot-staging/v2/.coveragerc | 13 - owl-bot-staging/v2/.flake8 | 33 - owl-bot-staging/v2/MANIFEST.in | 2 - owl-bot-staging/v2/README.rst | 49 - owl-bot-staging/v2/docs/conf.py | 
376 - .../v2/docs/dlp_v2/dlp_service.rst | 10 - owl-bot-staging/v2/docs/dlp_v2/services.rst | 6 - owl-bot-staging/v2/docs/dlp_v2/types.rst | 6 - owl-bot-staging/v2/docs/index.rst | 7 - .../v2/google/cloud/dlp/__init__.py | 395 - .../v2/google/cloud/dlp/gapic_version.py | 16 - owl-bot-staging/v2/google/cloud/dlp/py.typed | 2 - .../v2/google/cloud/dlp_v2/__init__.py | 396 - .../google/cloud/dlp_v2/gapic_metadata.json | 538 - .../v2/google/cloud/dlp_v2/gapic_version.py | 16 - .../v2/google/cloud/dlp_v2/py.typed | 2 - .../google/cloud/dlp_v2/services/__init__.py | 15 - .../dlp_v2/services/dlp_service/__init__.py | 22 - .../services/dlp_service/async_client.py | 4143 ---- .../dlp_v2/services/dlp_service/client.py | 4269 ---- .../dlp_v2/services/dlp_service/pagers.py | 623 - .../dlp_service/transports/__init__.py | 38 - .../services/dlp_service/transports/base.py | 752 - .../services/dlp_service/transports/grpc.py | 1262 -- .../dlp_service/transports/grpc_asyncio.py | 1261 -- .../services/dlp_service/transports/rest.py | 4325 ---- .../v2/google/cloud/dlp_v2/types/__init__.py | 390 - .../v2/google/cloud/dlp_v2/types/dlp.py | 8846 -------- .../v2/google/cloud/dlp_v2/types/storage.py | 1474 -- owl-bot-staging/v2/mypy.ini | 3 - owl-bot-staging/v2/noxfile.py | 184 - ..._dlp_service_activate_job_trigger_async.py | 52 - ...d_dlp_service_activate_job_trigger_sync.py | 52 - ...erated_dlp_service_cancel_dlp_job_async.py | 50 - ...nerated_dlp_service_cancel_dlp_job_sync.py | 50 - ...ervice_create_deidentify_template_async.py | 52 - ...service_create_deidentify_template_sync.py | 52 - ...erated_dlp_service_create_dlp_job_async.py | 52 - ...nerated_dlp_service_create_dlp_job_sync.py | 52 - ...p_service_create_inspect_template_async.py | 52 - ...lp_service_create_inspect_template_sync.py | 52 - ...ed_dlp_service_create_job_trigger_async.py | 56 - ...ted_dlp_service_create_job_trigger_sync.py | 56 - ...p_service_create_stored_info_type_async.py | 52 - 
...lp_service_create_stored_info_type_sync.py | 52 - ...ed_dlp_service_deidentify_content_async.py | 51 - ...ted_dlp_service_deidentify_content_sync.py | 51 - ...ervice_delete_deidentify_template_async.py | 50 - ...service_delete_deidentify_template_sync.py | 50 - ...erated_dlp_service_delete_dlp_job_async.py | 50 - ...nerated_dlp_service_delete_dlp_job_sync.py | 50 - ...p_service_delete_inspect_template_async.py | 50 - ...lp_service_delete_inspect_template_sync.py | 50 - ...ed_dlp_service_delete_job_trigger_async.py | 50 - ...ted_dlp_service_delete_job_trigger_sync.py | 50 - ...p_service_delete_stored_info_type_async.py | 50 - ...lp_service_delete_stored_info_type_sync.py | 50 - ...erated_dlp_service_finish_dlp_job_async.py | 50 - ...nerated_dlp_service_finish_dlp_job_sync.py | 50 - ...p_service_get_deidentify_template_async.py | 52 - ...lp_service_get_deidentify_template_sync.py | 52 - ...generated_dlp_service_get_dlp_job_async.py | 52 - ..._generated_dlp_service_get_dlp_job_sync.py | 52 - ..._dlp_service_get_inspect_template_async.py | 52 - ...d_dlp_service_get_inspect_template_sync.py | 52 - ...rated_dlp_service_get_job_trigger_async.py | 52 - ...erated_dlp_service_get_job_trigger_sync.py | 52 - ..._dlp_service_get_stored_info_type_async.py | 52 - ...d_dlp_service_get_stored_info_type_sync.py | 52 - ...lp_service_hybrid_inspect_dlp_job_async.py | 52 - ...dlp_service_hybrid_inspect_dlp_job_sync.py | 52 - ...ervice_hybrid_inspect_job_trigger_async.py | 52 - ...service_hybrid_inspect_job_trigger_sync.py | 52 - ...rated_dlp_service_inspect_content_async.py | 51 - ...erated_dlp_service_inspect_content_sync.py | 51 - ...service_list_deidentify_templates_async.py | 53 - ..._service_list_deidentify_templates_sync.py | 53 - ...nerated_dlp_service_list_dlp_jobs_async.py | 53 - ...enerated_dlp_service_list_dlp_jobs_sync.py | 53 - ...rated_dlp_service_list_info_types_async.py | 51 - ...erated_dlp_service_list_info_types_sync.py | 51 - 
...lp_service_list_inspect_templates_async.py | 53 - ...dlp_service_list_inspect_templates_sync.py | 53 - ...ted_dlp_service_list_job_triggers_async.py | 53 - ...ated_dlp_service_list_job_triggers_sync.py | 53 - ...lp_service_list_stored_info_types_async.py | 53 - ...dlp_service_list_stored_info_types_sync.py | 53 - ...enerated_dlp_service_redact_image_async.py | 51 - ...generated_dlp_service_redact_image_sync.py | 51 - ...ed_dlp_service_reidentify_content_async.py | 52 - ...ted_dlp_service_reidentify_content_sync.py | 52 - ...ervice_update_deidentify_template_async.py | 52 - ...service_update_deidentify_template_sync.py | 52 - ...p_service_update_inspect_template_async.py | 52 - ...lp_service_update_inspect_template_sync.py | 52 - ...ed_dlp_service_update_job_trigger_async.py | 52 - ...ted_dlp_service_update_job_trigger_sync.py | 52 - ...p_service_update_stored_info_type_async.py | 52 - ...lp_service_update_stored_info_type_sync.py | 52 - ...nippet_metadata_google.privacy.dlp.v2.json | 5503 ----- .../v2/scripts/fixup_dlp_v2_keywords.py | 209 - owl-bot-staging/v2/setup.py | 90 - .../v2/testing/constraints-3.10.txt | 6 - .../v2/testing/constraints-3.11.txt | 6 - .../v2/testing/constraints-3.12.txt | 6 - .../v2/testing/constraints-3.7.txt | 9 - .../v2/testing/constraints-3.8.txt | 6 - .../v2/testing/constraints-3.9.txt | 6 - owl-bot-staging/v2/tests/__init__.py | 16 - owl-bot-staging/v2/tests/unit/__init__.py | 16 - .../v2/tests/unit/gapic/__init__.py | 16 - .../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 - .../unit/gapic/dlp_v2/test_dlp_service.py | 17404 ---------------- tests/unit/gapic/dlp_v2/test_dlp_service.py | 1 + 120 files changed, 7 insertions(+), 56301 deletions(-) delete mode 100644 owl-bot-staging/v2/.coveragerc delete mode 100644 owl-bot-staging/v2/.flake8 delete mode 100644 owl-bot-staging/v2/MANIFEST.in delete mode 100644 owl-bot-staging/v2/README.rst delete mode 100644 owl-bot-staging/v2/docs/conf.py delete mode 100644 
owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/services.rst delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/types.rst delete mode 100644 owl-bot-staging/v2/docs/index.rst delete mode 100644 owl-bot-staging/v2/google/cloud/dlp/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py delete mode 100644 owl-bot-staging/v2/mypy.ini delete mode 100644 owl-bot-staging/v2/noxfile.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json delete mode 100644 owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py delete mode 100644 owl-bot-staging/v2/setup.py delete mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v2/tests/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/google/cloud/dlp_v2/services/dlp_service/async_client.py b/google/cloud/dlp_v2/services/dlp_service/async_client.py index 5adce2d2..7b891db2 100644 --- a/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ b/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -42,6 +42,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import 
timestamp_pb2 # type: ignore diff --git a/google/cloud/dlp_v2/services/dlp_service/client.py b/google/cloud/dlp_v2/services/dlp_service/client.py index 38635b92..9afd953c 100644 --- a/google/cloud/dlp_v2/services/dlp_service/client.py +++ b/google/cloud/dlp_v2/services/dlp_service/client.py @@ -46,6 +46,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/google/cloud/dlp_v2/services/dlp_service/transports/base.py index 7ec937f4..8d3bcc3f 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/base.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -22,6 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py index 49370657..9aa76062 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -20,6 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py 
b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py index 602fa461..49af838a 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -19,6 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/google/cloud/dlp_v2/services/dlp_service/transports/rest.py index b6bfd3c1..ad8e9da0 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -26,6 +26,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc deleted file mode 100644 index 76798ec2..00000000 --- a/owl-bot-staging/v2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/dlp/__init__.py - google/cloud/dlp/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v2/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- 
coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in deleted file mode 100644 index 148f6bf3..00000000 --- a/owl-bot-staging/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/dlp *.py -recursive-include google/cloud/dlp_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst deleted file mode 100644 index cf97c2e7..00000000 --- a/owl-bot-staging/v2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Dlp API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Dlp API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py deleted file mode 100644 index cf2f570a..00000000 --- a/owl-bot-staging/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-dlp documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. 
- -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-dlp" -copyright = u"2022, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. 
-version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dlp-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. 
- # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-dlp.tex", - u"google-cloud-dlp Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-dlp", - u"Google Cloud Dlp Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-dlp", - u"google-cloud-dlp Documentation", - author, - "google-cloud-dlp", - "GAPIC library for Google Cloud Dlp API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst deleted file mode 100644 index 914da512..00000000 --- 
a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DlpService ----------------------------- - -.. automodule:: google.cloud.dlp_v2.services.dlp_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v2/docs/dlp_v2/services.rst b/owl-bot-staging/v2/docs/dlp_v2/services.rst deleted file mode 100644 index 864a8c83..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Dlp v2 API -==================================== -.. toctree:: - :maxdepth: 2 - - dlp_service diff --git a/owl-bot-staging/v2/docs/dlp_v2/types.rst b/owl-bot-staging/v2/docs/dlp_v2/types.rst deleted file mode 100644 index 5470b717..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Dlp v2 API -================================= - -.. automodule:: google.cloud.dlp_v2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst deleted file mode 100644 index d119451a..00000000 --- a/owl-bot-staging/v2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - dlp_v2/services - dlp_v2/types diff --git a/owl-bot-staging/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/v2/google/cloud/dlp/__init__.py deleted file mode 100644 index 3c1a800c..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/__init__.py +++ /dev/null @@ -1,395 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.dlp import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient - -from google.cloud.dlp_v2.types.dlp import Action -from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails -from google.cloud.dlp_v2.types.dlp import BoundingBox -from google.cloud.dlp_v2.types.dlp import BucketingConfig -from google.cloud.dlp_v2.types.dlp import ByteContentItem -from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig -from google.cloud.dlp_v2.types.dlp import CharsToIgnore -from google.cloud.dlp_v2.types.dlp import Color -from google.cloud.dlp_v2.types.dlp import Container -from google.cloud.dlp_v2.types.dlp import ContentItem -from google.cloud.dlp_v2.types.dlp import ContentLocation -from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig -from google.cloud.dlp_v2.types.dlp import CryptoHashConfig -from google.cloud.dlp_v2.types.dlp import CryptoKey -from 
google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig -from google.cloud.dlp_v2.types.dlp import DataProfileAction -from google.cloud.dlp_v2.types.dlp import DataProfileConfigSnapshot -from google.cloud.dlp_v2.types.dlp import DataProfileJobConfig -from google.cloud.dlp_v2.types.dlp import DataProfileLocation -from google.cloud.dlp_v2.types.dlp import DataProfilePubSubCondition -from google.cloud.dlp_v2.types.dlp import DataProfilePubSubMessage -from google.cloud.dlp_v2.types.dlp import DataRiskLevel -from google.cloud.dlp_v2.types.dlp import DateShiftConfig -from google.cloud.dlp_v2.types.dlp import DateTime -from google.cloud.dlp_v2.types.dlp import DeidentifyConfig -from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate -from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest -from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import DlpJob -from google.cloud.dlp_v2.types.dlp import DocumentLocation -from google.cloud.dlp_v2.types.dlp import Error -from google.cloud.dlp_v2.types.dlp import ExcludeByHotword -from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes -from google.cloud.dlp_v2.types.dlp import ExclusionRule -from google.cloud.dlp_v2.types.dlp import FieldTransformation -from google.cloud.dlp_v2.types.dlp import Finding -from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest -from google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig -from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest -from google.cloud.dlp_v2.types.dlp import 
GetInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import HybridContentItem -from google.cloud.dlp_v2.types.dlp import HybridFindingDetails -from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest -from google.cloud.dlp_v2.types.dlp import HybridInspectJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import HybridInspectResponse -from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics -from google.cloud.dlp_v2.types.dlp import ImageLocation -from google.cloud.dlp_v2.types.dlp import ImageTransformations -from google.cloud.dlp_v2.types.dlp import InfoTypeCategory -from google.cloud.dlp_v2.types.dlp import InfoTypeDescription -from google.cloud.dlp_v2.types.dlp import InfoTypeStats -from google.cloud.dlp_v2.types.dlp import InfoTypeSummary -from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations -from google.cloud.dlp_v2.types.dlp import InspectConfig -from google.cloud.dlp_v2.types.dlp import InspectContentRequest -from google.cloud.dlp_v2.types.dlp import InspectContentResponse -from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails -from google.cloud.dlp_v2.types.dlp import InspectionRule -from google.cloud.dlp_v2.types.dlp import InspectionRuleSet -from google.cloud.dlp_v2.types.dlp import InspectJobConfig -from google.cloud.dlp_v2.types.dlp import InspectResult -from google.cloud.dlp_v2.types.dlp import InspectTemplate -from google.cloud.dlp_v2.types.dlp import JobTrigger -from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse -from google.cloud.dlp_v2.types.dlp import 
ListDlpJobsRequest -from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse -from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse -from google.cloud.dlp_v2.types.dlp import ListJobTriggersRequest -from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse -from google.cloud.dlp_v2.types.dlp import Location -from google.cloud.dlp_v2.types.dlp import Manual -from google.cloud.dlp_v2.types.dlp import MetadataLocation -from google.cloud.dlp_v2.types.dlp import OtherInfoTypeSummary -from google.cloud.dlp_v2.types.dlp import OutputStorageConfig -from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation -from google.cloud.dlp_v2.types.dlp import PrivacyMetric -from google.cloud.dlp_v2.types.dlp import ProfileStatus -from google.cloud.dlp_v2.types.dlp import QuasiId -from google.cloud.dlp_v2.types.dlp import QuoteInfo -from google.cloud.dlp_v2.types.dlp import Range -from google.cloud.dlp_v2.types.dlp import RecordCondition -from google.cloud.dlp_v2.types.dlp import RecordLocation -from google.cloud.dlp_v2.types.dlp import RecordSuppression -from google.cloud.dlp_v2.types.dlp import RecordTransformation -from google.cloud.dlp_v2.types.dlp import RecordTransformations -from google.cloud.dlp_v2.types.dlp import RedactConfig -from google.cloud.dlp_v2.types.dlp import RedactImageRequest -from google.cloud.dlp_v2.types.dlp import RedactImageResponse -from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import ReplaceDictionaryConfig -from google.cloud.dlp_v2.types.dlp import 
ReplaceValueConfig -from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig -from google.cloud.dlp_v2.types.dlp import Schedule -from google.cloud.dlp_v2.types.dlp import StatisticalTable -from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel -from google.cloud.dlp_v2.types.dlp import StoredInfoType -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion -from google.cloud.dlp_v2.types.dlp import Table -from google.cloud.dlp_v2.types.dlp import TableDataProfile -from google.cloud.dlp_v2.types.dlp import TableLocation -from google.cloud.dlp_v2.types.dlp import TimePartConfig -from google.cloud.dlp_v2.types.dlp import TransformationConfig -from google.cloud.dlp_v2.types.dlp import TransformationDescription -from google.cloud.dlp_v2.types.dlp import TransformationDetails -from google.cloud.dlp_v2.types.dlp import TransformationDetailsStorageConfig -from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling -from google.cloud.dlp_v2.types.dlp import TransformationLocation -from google.cloud.dlp_v2.types.dlp import TransformationOverview -from google.cloud.dlp_v2.types.dlp import TransformationResultStatus -from google.cloud.dlp_v2.types.dlp import TransformationSummary -from google.cloud.dlp_v2.types.dlp import TransientCryptoKey -from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey -from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import Value -from google.cloud.dlp_v2.types.dlp import ValueFrequency -from google.cloud.dlp_v2.types.dlp import VersionDescription 
-from google.cloud.dlp_v2.types.dlp import ContentOption -from google.cloud.dlp_v2.types.dlp import DlpJobType -from google.cloud.dlp_v2.types.dlp import EncryptionStatus -from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy -from google.cloud.dlp_v2.types.dlp import MatchingType -from google.cloud.dlp_v2.types.dlp import MetadataType -from google.cloud.dlp_v2.types.dlp import RelationalOperator -from google.cloud.dlp_v2.types.dlp import ResourceVisibility -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState -from google.cloud.dlp_v2.types.dlp import TransformationContainerType -from google.cloud.dlp_v2.types.dlp import TransformationResultStatusType -from google.cloud.dlp_v2.types.dlp import TransformationType -from google.cloud.dlp_v2.types.storage import BigQueryField -from google.cloud.dlp_v2.types.storage import BigQueryKey -from google.cloud.dlp_v2.types.storage import BigQueryOptions -from google.cloud.dlp_v2.types.storage import BigQueryTable -from google.cloud.dlp_v2.types.storage import CloudStorageFileSet -from google.cloud.dlp_v2.types.storage import CloudStorageOptions -from google.cloud.dlp_v2.types.storage import CloudStoragePath -from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet -from google.cloud.dlp_v2.types.storage import CustomInfoType -from google.cloud.dlp_v2.types.storage import DatastoreKey -from google.cloud.dlp_v2.types.storage import DatastoreOptions -from google.cloud.dlp_v2.types.storage import EntityId -from google.cloud.dlp_v2.types.storage import FieldId -from google.cloud.dlp_v2.types.storage import HybridOptions -from google.cloud.dlp_v2.types.storage import InfoType -from google.cloud.dlp_v2.types.storage import Key -from google.cloud.dlp_v2.types.storage import KindExpression -from google.cloud.dlp_v2.types.storage import PartitionId -from google.cloud.dlp_v2.types.storage import RecordKey -from google.cloud.dlp_v2.types.storage import SensitivityScore -from 
google.cloud.dlp_v2.types.storage import StorageConfig -from google.cloud.dlp_v2.types.storage import StoredType -from google.cloud.dlp_v2.types.storage import TableOptions -from google.cloud.dlp_v2.types.storage import FileType -from google.cloud.dlp_v2.types.storage import Likelihood - -__all__ = ('DlpServiceClient', - 'DlpServiceAsyncClient', - 'Action', - 'ActivateJobTriggerRequest', - 'AnalyzeDataSourceRiskDetails', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'Color', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateDeidentifyTemplateRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DataProfileAction', - 'DataProfileConfigSnapshot', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - 'DataRiskLevel', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyTemplate', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDlpJobRequest', - 'DeleteInspectTemplateRequest', - 'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeByHotword', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetDeidentifyTemplateRequest', - 'GetDlpJobRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetStoredInfoTypeRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'ImageTransformations', - 'InfoTypeCategory', - 'InfoTypeDescription', - 'InfoTypeStats', - 
'InfoTypeSummary', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OtherInfoTypeSummary', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'ProfileStatus', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformation', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'ReplaceDictionaryConfig', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableDataProfile', - 'TableLocation', - 'TimePartConfig', - 'TransformationConfig', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationDetailsStorageConfig', - 'TransformationErrorHandling', - 'TransformationLocation', - 'TransformationOverview', - 'TransformationResultStatus', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateDeidentifyTemplateRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 
'ValueFrequency', - 'VersionDescription', - 'ContentOption', - 'DlpJobType', - 'EncryptionStatus', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'RelationalOperator', - 'ResourceVisibility', - 'StoredInfoTypeState', - 'TransformationContainerType', - 'TransformationResultStatusType', - 'TransformationType', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'SensitivityScore', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp/py.typed b/owl-bot-staging/v2/google/cloud/dlp/py.typed deleted file mode 100644 index 23d89ef3..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. 
-# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py deleted file mode 100644 index 8397a3ad..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py +++ /dev/null @@ -1,396 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.dlp_v2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.dlp_service import DlpServiceClient -from .services.dlp_service import DlpServiceAsyncClient - -from .types.dlp import Action -from .types.dlp import ActivateJobTriggerRequest -from .types.dlp import AnalyzeDataSourceRiskDetails -from .types.dlp import BoundingBox -from .types.dlp import BucketingConfig -from .types.dlp import ByteContentItem -from .types.dlp import CancelDlpJobRequest -from .types.dlp import CharacterMaskConfig -from .types.dlp import CharsToIgnore -from .types.dlp import Color -from .types.dlp import Container -from .types.dlp import ContentItem -from .types.dlp import ContentLocation -from .types.dlp import CreateDeidentifyTemplateRequest -from .types.dlp import CreateDlpJobRequest -from .types.dlp import CreateInspectTemplateRequest -from .types.dlp import CreateJobTriggerRequest -from .types.dlp import CreateStoredInfoTypeRequest -from .types.dlp import CryptoDeterministicConfig -from .types.dlp import 
CryptoHashConfig -from .types.dlp import CryptoKey -from .types.dlp import CryptoReplaceFfxFpeConfig -from .types.dlp import DataProfileAction -from .types.dlp import DataProfileConfigSnapshot -from .types.dlp import DataProfileJobConfig -from .types.dlp import DataProfileLocation -from .types.dlp import DataProfilePubSubCondition -from .types.dlp import DataProfilePubSubMessage -from .types.dlp import DataRiskLevel -from .types.dlp import DateShiftConfig -from .types.dlp import DateTime -from .types.dlp import DeidentifyConfig -from .types.dlp import DeidentifyContentRequest -from .types.dlp import DeidentifyContentResponse -from .types.dlp import DeidentifyTemplate -from .types.dlp import DeleteDeidentifyTemplateRequest -from .types.dlp import DeleteDlpJobRequest -from .types.dlp import DeleteInspectTemplateRequest -from .types.dlp import DeleteJobTriggerRequest -from .types.dlp import DeleteStoredInfoTypeRequest -from .types.dlp import DlpJob -from .types.dlp import DocumentLocation -from .types.dlp import Error -from .types.dlp import ExcludeByHotword -from .types.dlp import ExcludeInfoTypes -from .types.dlp import ExclusionRule -from .types.dlp import FieldTransformation -from .types.dlp import Finding -from .types.dlp import FinishDlpJobRequest -from .types.dlp import FixedSizeBucketingConfig -from .types.dlp import GetDeidentifyTemplateRequest -from .types.dlp import GetDlpJobRequest -from .types.dlp import GetInspectTemplateRequest -from .types.dlp import GetJobTriggerRequest -from .types.dlp import GetStoredInfoTypeRequest -from .types.dlp import HybridContentItem -from .types.dlp import HybridFindingDetails -from .types.dlp import HybridInspectDlpJobRequest -from .types.dlp import HybridInspectJobTriggerRequest -from .types.dlp import HybridInspectResponse -from .types.dlp import HybridInspectStatistics -from .types.dlp import ImageLocation -from .types.dlp import ImageTransformations -from .types.dlp import InfoTypeCategory -from .types.dlp import 
InfoTypeDescription -from .types.dlp import InfoTypeStats -from .types.dlp import InfoTypeSummary -from .types.dlp import InfoTypeTransformations -from .types.dlp import InspectConfig -from .types.dlp import InspectContentRequest -from .types.dlp import InspectContentResponse -from .types.dlp import InspectDataSourceDetails -from .types.dlp import InspectionRule -from .types.dlp import InspectionRuleSet -from .types.dlp import InspectJobConfig -from .types.dlp import InspectResult -from .types.dlp import InspectTemplate -from .types.dlp import JobTrigger -from .types.dlp import KmsWrappedCryptoKey -from .types.dlp import LargeCustomDictionaryConfig -from .types.dlp import LargeCustomDictionaryStats -from .types.dlp import ListDeidentifyTemplatesRequest -from .types.dlp import ListDeidentifyTemplatesResponse -from .types.dlp import ListDlpJobsRequest -from .types.dlp import ListDlpJobsResponse -from .types.dlp import ListInfoTypesRequest -from .types.dlp import ListInfoTypesResponse -from .types.dlp import ListInspectTemplatesRequest -from .types.dlp import ListInspectTemplatesResponse -from .types.dlp import ListJobTriggersRequest -from .types.dlp import ListJobTriggersResponse -from .types.dlp import ListStoredInfoTypesRequest -from .types.dlp import ListStoredInfoTypesResponse -from .types.dlp import Location -from .types.dlp import Manual -from .types.dlp import MetadataLocation -from .types.dlp import OtherInfoTypeSummary -from .types.dlp import OutputStorageConfig -from .types.dlp import PrimitiveTransformation -from .types.dlp import PrivacyMetric -from .types.dlp import ProfileStatus -from .types.dlp import QuasiId -from .types.dlp import QuoteInfo -from .types.dlp import Range -from .types.dlp import RecordCondition -from .types.dlp import RecordLocation -from .types.dlp import RecordSuppression -from .types.dlp import RecordTransformation -from .types.dlp import RecordTransformations -from .types.dlp import RedactConfig -from .types.dlp import 
RedactImageRequest -from .types.dlp import RedactImageResponse -from .types.dlp import ReidentifyContentRequest -from .types.dlp import ReidentifyContentResponse -from .types.dlp import ReplaceDictionaryConfig -from .types.dlp import ReplaceValueConfig -from .types.dlp import ReplaceWithInfoTypeConfig -from .types.dlp import RiskAnalysisJobConfig -from .types.dlp import Schedule -from .types.dlp import StatisticalTable -from .types.dlp import StorageMetadataLabel -from .types.dlp import StoredInfoType -from .types.dlp import StoredInfoTypeConfig -from .types.dlp import StoredInfoTypeStats -from .types.dlp import StoredInfoTypeVersion -from .types.dlp import Table -from .types.dlp import TableDataProfile -from .types.dlp import TableLocation -from .types.dlp import TimePartConfig -from .types.dlp import TransformationConfig -from .types.dlp import TransformationDescription -from .types.dlp import TransformationDetails -from .types.dlp import TransformationDetailsStorageConfig -from .types.dlp import TransformationErrorHandling -from .types.dlp import TransformationLocation -from .types.dlp import TransformationOverview -from .types.dlp import TransformationResultStatus -from .types.dlp import TransformationSummary -from .types.dlp import TransientCryptoKey -from .types.dlp import UnwrappedCryptoKey -from .types.dlp import UpdateDeidentifyTemplateRequest -from .types.dlp import UpdateInspectTemplateRequest -from .types.dlp import UpdateJobTriggerRequest -from .types.dlp import UpdateStoredInfoTypeRequest -from .types.dlp import Value -from .types.dlp import ValueFrequency -from .types.dlp import VersionDescription -from .types.dlp import ContentOption -from .types.dlp import DlpJobType -from .types.dlp import EncryptionStatus -from .types.dlp import InfoTypeSupportedBy -from .types.dlp import MatchingType -from .types.dlp import MetadataType -from .types.dlp import RelationalOperator -from .types.dlp import ResourceVisibility -from .types.dlp import 
StoredInfoTypeState -from .types.dlp import TransformationContainerType -from .types.dlp import TransformationResultStatusType -from .types.dlp import TransformationType -from .types.storage import BigQueryField -from .types.storage import BigQueryKey -from .types.storage import BigQueryOptions -from .types.storage import BigQueryTable -from .types.storage import CloudStorageFileSet -from .types.storage import CloudStorageOptions -from .types.storage import CloudStoragePath -from .types.storage import CloudStorageRegexFileSet -from .types.storage import CustomInfoType -from .types.storage import DatastoreKey -from .types.storage import DatastoreOptions -from .types.storage import EntityId -from .types.storage import FieldId -from .types.storage import HybridOptions -from .types.storage import InfoType -from .types.storage import Key -from .types.storage import KindExpression -from .types.storage import PartitionId -from .types.storage import RecordKey -from .types.storage import SensitivityScore -from .types.storage import StorageConfig -from .types.storage import StoredType -from .types.storage import TableOptions -from .types.storage import FileType -from .types.storage import Likelihood - -__all__ = ( - 'DlpServiceAsyncClient', -'Action', -'ActivateJobTriggerRequest', -'AnalyzeDataSourceRiskDetails', -'BigQueryField', -'BigQueryKey', -'BigQueryOptions', -'BigQueryTable', -'BoundingBox', -'BucketingConfig', -'ByteContentItem', -'CancelDlpJobRequest', -'CharacterMaskConfig', -'CharsToIgnore', -'CloudStorageFileSet', -'CloudStorageOptions', -'CloudStoragePath', -'CloudStorageRegexFileSet', -'Color', -'Container', -'ContentItem', -'ContentLocation', -'ContentOption', -'CreateDeidentifyTemplateRequest', -'CreateDlpJobRequest', -'CreateInspectTemplateRequest', -'CreateJobTriggerRequest', -'CreateStoredInfoTypeRequest', -'CryptoDeterministicConfig', -'CryptoHashConfig', -'CryptoKey', -'CryptoReplaceFfxFpeConfig', -'CustomInfoType', -'DataProfileAction', 
-'DataProfileConfigSnapshot', -'DataProfileJobConfig', -'DataProfileLocation', -'DataProfilePubSubCondition', -'DataProfilePubSubMessage', -'DataRiskLevel', -'DatastoreKey', -'DatastoreOptions', -'DateShiftConfig', -'DateTime', -'DeidentifyConfig', -'DeidentifyContentRequest', -'DeidentifyContentResponse', -'DeidentifyTemplate', -'DeleteDeidentifyTemplateRequest', -'DeleteDlpJobRequest', -'DeleteInspectTemplateRequest', -'DeleteJobTriggerRequest', -'DeleteStoredInfoTypeRequest', -'DlpJob', -'DlpJobType', -'DlpServiceClient', -'DocumentLocation', -'EncryptionStatus', -'EntityId', -'Error', -'ExcludeByHotword', -'ExcludeInfoTypes', -'ExclusionRule', -'FieldId', -'FieldTransformation', -'FileType', -'Finding', -'FinishDlpJobRequest', -'FixedSizeBucketingConfig', -'GetDeidentifyTemplateRequest', -'GetDlpJobRequest', -'GetInspectTemplateRequest', -'GetJobTriggerRequest', -'GetStoredInfoTypeRequest', -'HybridContentItem', -'HybridFindingDetails', -'HybridInspectDlpJobRequest', -'HybridInspectJobTriggerRequest', -'HybridInspectResponse', -'HybridInspectStatistics', -'HybridOptions', -'ImageLocation', -'ImageTransformations', -'InfoType', -'InfoTypeCategory', -'InfoTypeDescription', -'InfoTypeStats', -'InfoTypeSummary', -'InfoTypeSupportedBy', -'InfoTypeTransformations', -'InspectConfig', -'InspectContentRequest', -'InspectContentResponse', -'InspectDataSourceDetails', -'InspectJobConfig', -'InspectResult', -'InspectTemplate', -'InspectionRule', -'InspectionRuleSet', -'JobTrigger', -'Key', -'KindExpression', -'KmsWrappedCryptoKey', -'LargeCustomDictionaryConfig', -'LargeCustomDictionaryStats', -'Likelihood', -'ListDeidentifyTemplatesRequest', -'ListDeidentifyTemplatesResponse', -'ListDlpJobsRequest', -'ListDlpJobsResponse', -'ListInfoTypesRequest', -'ListInfoTypesResponse', -'ListInspectTemplatesRequest', -'ListInspectTemplatesResponse', -'ListJobTriggersRequest', -'ListJobTriggersResponse', -'ListStoredInfoTypesRequest', -'ListStoredInfoTypesResponse', -'Location', 
-'Manual', -'MatchingType', -'MetadataLocation', -'MetadataType', -'OtherInfoTypeSummary', -'OutputStorageConfig', -'PartitionId', -'PrimitiveTransformation', -'PrivacyMetric', -'ProfileStatus', -'QuasiId', -'QuoteInfo', -'Range', -'RecordCondition', -'RecordKey', -'RecordLocation', -'RecordSuppression', -'RecordTransformation', -'RecordTransformations', -'RedactConfig', -'RedactImageRequest', -'RedactImageResponse', -'ReidentifyContentRequest', -'ReidentifyContentResponse', -'RelationalOperator', -'ReplaceDictionaryConfig', -'ReplaceValueConfig', -'ReplaceWithInfoTypeConfig', -'ResourceVisibility', -'RiskAnalysisJobConfig', -'Schedule', -'SensitivityScore', -'StatisticalTable', -'StorageConfig', -'StorageMetadataLabel', -'StoredInfoType', -'StoredInfoTypeConfig', -'StoredInfoTypeState', -'StoredInfoTypeStats', -'StoredInfoTypeVersion', -'StoredType', -'Table', -'TableDataProfile', -'TableLocation', -'TableOptions', -'TimePartConfig', -'TransformationConfig', -'TransformationContainerType', -'TransformationDescription', -'TransformationDetails', -'TransformationDetailsStorageConfig', -'TransformationErrorHandling', -'TransformationLocation', -'TransformationOverview', -'TransformationResultStatus', -'TransformationResultStatusType', -'TransformationSummary', -'TransformationType', -'TransientCryptoKey', -'UnwrappedCryptoKey', -'UpdateDeidentifyTemplateRequest', -'UpdateInspectTemplateRequest', -'UpdateJobTriggerRequest', -'UpdateStoredInfoTypeRequest', -'Value', -'ValueFrequency', -'VersionDescription', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json deleted file mode 100644 index 634002d4..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json +++ /dev/null @@ -1,538 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.dlp_v2", - "protoPackage": 
"google.privacy.dlp.v2", - "schema": "1.0", - "services": { - "DlpService": { - "clients": { - "grpc": { - "libraryClient": "DlpServiceClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - 
"ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DlpServiceAsyncClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - 
] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - }, - "rest": { - "libraryClient": "DlpServiceClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - 
"delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py 
b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed deleted file mode 100644 index 23d89ef3..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py deleted file mode 100644 index e8e1c384..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py deleted file mode 100644 index aa9c062a..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DlpServiceClient -from .async_client import DlpServiceAsyncClient - -__all__ = ( - 'DlpServiceClient', - 'DlpServiceAsyncClient', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py deleted file mode 100644 index 041479c1..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ /dev/null @@ -1,4143 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dlp_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .client import DlpServiceClient - - -class DlpServiceAsyncClient: - """The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. 
- The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - """ - - _client: DlpServiceClient - - DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT - - deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) - parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) - dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) - parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) - dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) - parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) - finding_path = staticmethod(DlpServiceClient.finding_path) - parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) - inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) - parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) - job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) - parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) - stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) - parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) - common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DlpServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DlpServiceClient.common_organization_path) - parse_common_organization_path = 
staticmethod(DlpServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DlpServiceClient.common_project_path) - parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) - common_location_path = staticmethod(DlpServiceClient.common_location_path) - parse_common_location_path = staticmethod(DlpServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DlpServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DlpServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(DlpServiceClient).get_transport_class, type(DlpServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DlpServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dlp service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.DlpServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DlpServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def inspect_content(self, - request: Optional[Union[dlp.InspectContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectContentResponse: - r"""Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. 
- When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = await client.inspect_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.inspect_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def redact_image(self, - request: Optional[Union[dlp.RedactImageRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = await client.redact_image(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]]): - The request object. Request to search for potentially - sensitive info in an image and redact it by covering it - with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.redact_image, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def deidentify_content(self, - request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = await client.deidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]]): - The request object. Request to de-identify a - ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.deidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def reidentify_content(self, - request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.reidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]]): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying an item. - """ - # Create or coerce a protobuf request object. - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_info_types(self, - request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = await client.list_info_types(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]]): - The request object. Request for the list of infoTypes. - parent (:class:`str`): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_inspect_template(self, - request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]]): - The request object. Request message for - CreateInspectTemplate. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - Required. The InspectTemplate to - create. - - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, inspect_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_template is not None: - request.inspect_template = inspect_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_inspect_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_inspect_template(self, - request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]]): - The request object. Request message for - UpdateInspectTemplate. - name (:class:`str`): - Required. Resource name of organization and - inspectTemplate to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - New InspectTemplate value. - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, inspect_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_inspect_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_inspect_template(self, - request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]]): - The request object. Request message for - GetInspectTemplate. - name (:class:`str`): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_inspect_templates(self, - request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInspectTemplatesAsyncPager: - r"""Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]]): - The request object. Request message for - ListInspectTemplates. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: - Response message for - ListInspectTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListInspectTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_inspect_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListInspectTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_inspect_template(self, - request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_inspect_template(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]]): - The request object. Request message for - DeleteInspectTemplate. - name (:class:`str`): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_deidentify_template(self, - request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]]): - The request object. Request message for - CreateDeidentifyTemplate. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - Required. The DeidentifyTemplate to - create. - - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, deidentify_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if deidentify_template is not None: - request.deidentify_template = deidentify_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_deidentify_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_deidentify_template(self, - request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]]): - The request object. Request message for - UpdateDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, deidentify_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_deidentify_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_deidentify_template(self, - request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. 
- See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]]): - The request object. Request message for - GetDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_deidentify_templates(self, - request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDeidentifyTemplatesAsyncPager: - r"""Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]]): - The request object. Request message for - ListDeidentifyTemplates. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: - Response message for - ListDeidentifyTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListDeidentifyTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_deidentify_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListDeidentifyTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_deidentify_template(self, - request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_deidentify_template(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]]): - The request object. Request message for - DeleteDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_job_trigger(self, - request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = await client.create_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]]): - The request object. Request message for - CreateJobTrigger. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): - Required. The JobTrigger to create. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_trigger]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if job_trigger is not None: - request.job_trigger = job_trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_job_trigger(self, - request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.update_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]]): - The request object. Request message for - UpdateJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): - New JobTrigger value. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, job_trigger, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def hybrid_inspect_job_trigger(self, - request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.HybridInspectJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_trigger(self, - request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]]): - The request object. Request message for GetJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_job_triggers(self, - request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTriggersAsyncPager: - r"""Lists job triggers. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]]): - The request object. Request message for ListJobTriggers. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager: - Response message for ListJobTriggers. - Iterating over this object will yield - results and resolve additional pages - automatically. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListJobTriggersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_job_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobTriggersAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job_trigger(self, - request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job trigger. 
- See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_trigger(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]]): - The request object. Request message for - DeleteJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def activate_job_trigger(self, - request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.activate_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]]): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - request = dlp.ActivateJobTriggerRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.activate_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_dlp_job(self, - request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_job: Optional[dlp.InspectJobConfig] = None, - risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]]): - The request object. Request message for - CreateDlpJobRequest. Used to initiate long running jobs - such as calculating risk metrics or inspecting Google - Cloud Storage. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`): - An inspection job scans a storage - repository for InfoTypes. - - This corresponds to the ``inspect_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`): - A risk analysis job calculates - re-identification risk metrics for a - BigQuery table. - - This corresponds to the ``risk_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, inspect_job, risk_job]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_job is not None: - request.inspect_job = inspect_job - if risk_job is not None: - request.risk_job = risk_job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_dlp_jobs(self, - request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDlpJobsAsyncPager: - r"""Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]]): - The request object. The request message for listing DLP - jobs. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: - The response message for listing DLP - jobs. - Iterating over this object will yield - results and resolve additional pages - automatically. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListDlpJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_dlp_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDlpJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_dlp_job(self, - request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]]): - The request object. The request message for - [DlpJobs.GetDlpJob][]. - name (:class:`str`): - Required. The name of the DlpJob - resource. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_dlp_job(self, - request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. 
- See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]]): - The request object. The request message for deleting a - DLP job. - name (:class:`str`): - Required. The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def cancel_dlp_job(self, - request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]]): - The request object. The request message for canceling a - DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = dlp.CancelDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_stored_info_type(self, - request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]]): - The request object. Request message for - CreateStoredInfoType. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): - Required. Configuration of the - storedInfoType to create. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, config]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_stored_info_type, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_stored_info_type(self, - request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.update_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]]): - The request object. Request message for - UpdateStoredInfoType. - name (:class:`str`): - Required. Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, config, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_stored_info_type, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_stored_info_type(self, - request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. 
- See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]]): - The request object. Request message for - GetStoredInfoType. - name (:class:`str`): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_stored_info_types(self, - request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListStoredInfoTypesAsyncPager: - r"""Lists stored infoTypes. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]]): - The request object. Request message for - ListStoredInfoTypes. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager: - Response message for - ListStoredInfoTypes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListStoredInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_stored_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListStoredInfoTypesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_stored_info_type(self, - request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - await client.delete_stored_info_type(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]]): - The request object. Request message for - DeleteStoredInfoType. - name (:class:`str`): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def hybrid_inspect_dlp_job(self, - request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.HybridInspectDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def finish_dlp_job(self, - request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.finish_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]]): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.finish_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DlpServiceAsyncClient", -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py deleted file mode 100644 index 9d90a824..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py +++ /dev/null @@ -1,4269 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dlp_v2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DlpServiceGrpcTransport -from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .transports.rest import DlpServiceRestTransport - - -class DlpServiceClientMeta(type): - """Metaclass for the DlpService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] - _transport_registry["grpc"] = DlpServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport - _transport_registry["rest"] = DlpServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DlpServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DlpServiceClient(metaclass=DlpServiceClientMeta): - """The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dlp.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DlpServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def deidentify_template_path(organization: str,deidentify_template: str,) -> str: - """Returns a fully-qualified deidentify_template string.""" - return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - - @staticmethod - def parse_deidentify_template_path(path: str) -> Dict[str,str]: - """Parses a deidentify_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_content_path(project: str,) -> str: - """Returns a fully-qualified dlp_content string.""" - return "projects/{project}/dlpContent".format(project=project, ) - - @staticmethod - def parse_dlp_content_path(path: str) -> Dict[str,str]: - """Parses a dlp_content path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpContent$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_job_path(project: str,dlp_job: str,) -> str: - """Returns a fully-qualified dlp_job string.""" - return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - - @staticmethod - def parse_dlp_job_path(path: str) -> Dict[str,str]: - """Parses a dlp_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def finding_path(project: str,location: str,finding: str,) -> str: - """Returns a fully-qualified finding string.""" - return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - - @staticmethod - def parse_finding_path(path: str) -> Dict[str,str]: - """Parses a finding path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", path) - return m.groupdict() if m else {} - - 
@staticmethod - def inspect_template_path(organization: str,inspect_template: str,) -> str: - """Returns a fully-qualified inspect_template string.""" - return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - - @staticmethod - def parse_inspect_template_path(path: str) -> Dict[str,str]: - """Parses a inspect_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_trigger_path(project: str,job_trigger: str,) -> str: - """Returns a fully-qualified job_trigger string.""" - return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - - @staticmethod - def parse_job_trigger_path(path: str) -> Dict[str,str]: - """Parses a job_trigger path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def stored_info_type_path(organization: str,stored_info_type: str,) -> str: - """Returns a fully-qualified stored_info_type string.""" - return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - - @staticmethod - def parse_stored_info_type_path(path: str) -> Dict[str,str]: - """Parses a stored_info_type path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/storedInfoTypes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = 
re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. 
- client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DlpServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dlp service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, DlpServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, DlpServiceTransport): - # transport is a DlpServiceTransport instance. 
- if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def inspect_content(self, - request: Optional[Union[dlp.InspectContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectContentResponse: - r"""Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = client.inspect_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.InspectContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.InspectContentRequest): - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.inspect_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def redact_image(self, - request: Optional[Union[dlp.RedactImageRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = client.redact_image(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]): - The request object. Request to search for potentially - sensitive info in an image and redact it by covering it - with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.RedactImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.RedactImageRequest): - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.redact_image] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def deidentify_content(self, - request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = client.deidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]): - The request object. Request to de-identify a - ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeidentifyContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeidentifyContentRequest): - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.deidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def reidentify_content(self, - request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = client.reidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying an item. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ReidentifyContentRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ReidentifyContentRequest): - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_info_types(self, - request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = client.list_info_types(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]): - The request object. Request for the list of infoTypes. - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListInfoTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, dlp.ListInfoTypesRequest): - request = dlp.ListInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_info_types] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_inspect_template(self, - request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]): - The request object. Request message for - CreateInspectTemplate. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - Required. The InspectTemplate to - create. - - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, inspect_template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateInspectTemplateRequest): - request = dlp.CreateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_template is not None: - request.inspect_template = inspect_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_inspect_template(self, - request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]): - The request object. Request message for - UpdateInspectTemplate. - name (str): - Required. Resource name of organization and - inspectTemplate to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - New InspectTemplate value. - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, inspect_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateInspectTemplateRequest): - request = dlp.UpdateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_inspect_template(self, - request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]): - The request object. Request message for - GetInspectTemplate. - name (str): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetInspectTemplateRequest): - request = dlp.GetInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_inspect_templates(self, - request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInspectTemplatesPager: - r"""Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]): - The request object. Request message for - ListInspectTemplates. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager: - Response message for - ListInspectTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListInspectTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListInspectTemplatesRequest): - request = dlp.ListInspectTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInspectTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_inspect_template(self, - request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_inspect_template(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]): - The request object. Request message for - DeleteInspectTemplate. - name (str): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteInspectTemplateRequest): - request = dlp.DeleteInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_deidentify_template(self, - request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]): - The request object. Request message for - CreateDeidentifyTemplate. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Required. The DeidentifyTemplate to - create. - - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, deidentify_template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): - request = dlp.CreateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if deidentify_template is not None: - request.deidentify_template = deidentify_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_deidentify_template(self, - request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]): - The request object. Request message for - UpdateDeidentifyTemplate. - name (str): - Required. 
Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, deidentify_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): - request = dlp.UpdateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_deidentify_template(self, - request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]): - The request object. Request message for - GetDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetDeidentifyTemplateRequest): - request = dlp.GetDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_deidentify_templates(self, - request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDeidentifyTemplatesPager: - r"""Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]): - The request object. Request message for - ListDeidentifyTemplates. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager: - Response message for - ListDeidentifyTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListDeidentifyTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): - request = dlp.ListDeidentifyTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDeidentifyTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_deidentify_template(self, - request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_deidentify_template(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]): - The request object. Request message for - DeleteDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): - request = dlp.DeleteDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_job_trigger(self, - request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = client.create_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]): - The request object. Request message for - CreateJobTrigger. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_trigger]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateJobTriggerRequest): - request = dlp.CreateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_trigger is not None: - request.job_trigger = job_trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_job_trigger(self, - request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.update_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]): - The request object. Request message for - UpdateJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, job_trigger, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateJobTriggerRequest): - request = dlp.UpdateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def hybrid_inspect_job_trigger(self, - request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.HybridInspectJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.HybridInspectJobTriggerRequest): - request = dlp.HybridInspectJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_job_trigger(self, - request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]): - The request object. Request message for GetJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. 
See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetJobTriggerRequest): - request = dlp.GetJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_job_triggers(self, - request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTriggersPager: - r"""Lists job triggers. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]): - The request object. Request message for ListJobTriggers. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: - Response message for ListJobTriggers. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListJobTriggersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListJobTriggersRequest): - request = dlp.ListJobTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobTriggersPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_job_trigger(self, - request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - client.delete_job_trigger(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]): - The request object. Request message for - DeleteJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteJobTriggerRequest): - request = dlp.DeleteJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def activate_job_trigger(self, - request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.activate_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ActivateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ActivateJobTriggerRequest): - request = dlp.ActivateJobTriggerRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def create_dlp_job(self, - request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_job: Optional[dlp.InspectJobConfig] = None, - risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]): - The request object. Request message for - CreateDlpJobRequest. Used to initiate long running jobs - such as calculating risk metrics or inspecting Google - Cloud Storage. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - An inspection job scans a storage - repository for InfoTypes. - - This corresponds to the ``inspect_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - A risk analysis job calculates - re-identification risk metrics for a - BigQuery table. - - This corresponds to the ``risk_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, inspect_job, risk_job]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateDlpJobRequest): - request = dlp.CreateDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_job is not None: - request.inspect_job = inspect_job - if risk_job is not None: - request.risk_job = risk_job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_dlp_jobs(self, - request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDlpJobsPager: - r"""Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]): - The request object. The request message for listing DLP - jobs. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: - The response message for listing DLP - jobs. 
- Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListDlpJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListDlpJobsRequest): - request = dlp.ListDlpJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDlpJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_dlp_job(self, - request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]): - The request object. The request message for - [DlpJobs.GetDlpJob][]. - name (str): - Required. The name of the DlpJob - resource. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetDlpJobRequest): - request = dlp.GetDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_dlp_job(self, - request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - client.delete_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]): - The request object. The request message for deleting a - DLP job. - name (str): - Required. The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, dlp.DeleteDlpJobRequest): - request = dlp.DeleteDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_dlp_job(self, - request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]): - The request object. The request message for canceling a - DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CancelDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CancelDlpJobRequest): - request = dlp.CancelDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_stored_info_type(self, - request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]): - The request object. Request message for - CreateStoredInfoType. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the - storedInfoType to create. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, config]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateStoredInfoTypeRequest): - request = dlp.CreateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_stored_info_type(self, - request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.update_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]): - The request object. Request message for - UpdateStoredInfoType. - name (str): - Required. Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, config, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): - request = dlp.UpdateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_stored_info_type(self, - request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]): - The request object. Request message for - GetStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetStoredInfoTypeRequest): - request = dlp.GetStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_stored_info_types(self, - request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListStoredInfoTypesPager: - r"""Lists stored infoTypes. 
- See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]): - The request object. Request message for - ListStoredInfoTypes. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager: - Response message for - ListStoredInfoTypes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListStoredInfoTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListStoredInfoTypesRequest): - request = dlp.ListStoredInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListStoredInfoTypesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_stored_info_type(self, - request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - client.delete_stored_info_type(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]): - The request object. Request message for - DeleteStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): - request = dlp.DeleteStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def hybrid_inspect_dlp_job(self, - request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.HybridInspectDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.HybridInspectDlpJobRequest): - request = dlp.HybridInspectDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def finish_dlp_job(self, - request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - client.finish_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.FinishDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.FinishDlpJobRequest): - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "DlpServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! 
Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DlpServiceClient", -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py deleted file mode 100644 index 73a0e48f..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py +++ /dev/null @@ -1,623 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dlp_v2.types import dlp - - -class ListInspectTemplatesPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``inspect_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListInspectTemplatesResponse], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.InspectTemplate]: - for page in self.pages: - yield from page.inspect_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInspectTemplatesAsyncPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``inspect_templates`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.InspectTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.inspect_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``deidentify_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDeidentifyTemplatesResponse], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.DeidentifyTemplate]: - for page in self.pages: - yield from page.deidentify_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesAsyncPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``deidentify_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.DeidentifyTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.deidentify_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersPager: - """A pager for iterating through ``list_job_triggers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_triggers`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListJobTriggersResponse], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.JobTrigger]: - for page in self.pages: - yield from page.job_triggers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersAsyncPager: - """A pager for iterating through ``list_job_triggers`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_triggers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.JobTrigger]: - async def async_generator(): - async for page in self.pages: - for response in page.job_triggers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDlpJobsResponse], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. 
- response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.DlpJob]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsAsyncPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.DlpJob]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``stored_info_types`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.ListStoredInfoTypesResponse], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.StoredInfoType]: - for page in self.pages: - yield from page.stored_info_types - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesAsyncPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``stored_info_types`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.StoredInfoType]: - async def async_generator(): - async for page in self.pages: - for response in page.stored_info_types: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py deleted file mode 100644 index df9b4279..00000000 --- 
a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DlpServiceTransport -from .grpc import DlpServiceGrpcTransport -from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .rest import DlpServiceRestTransport -from .rest import DlpServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] -_transport_registry['grpc'] = DlpServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport -_transport_registry['rest'] = DlpServiceRestTransport - -__all__ = ( - 'DlpServiceTransport', - 'DlpServiceGrpcTransport', - 'DlpServiceGrpcAsyncIOTransport', - 'DlpServiceRestTransport', - 'DlpServiceRestInterceptor', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py deleted file mode 100644 index e90545e1..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py +++ /dev/null @@ -1,752 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dlp_v2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DlpServiceTransport(abc.ABC): - """Abstract transport class for DlpService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dlp.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. 
- scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.inspect_content: gapic_v1.method.wrap_method( - self.inspect_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.redact_image: gapic_v1.method.wrap_method( - self.redact_image, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.deidentify_content: gapic_v1.method.wrap_method( - self.deidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.reidentify_content: gapic_v1.method.wrap_method( - self.reidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_info_types: gapic_v1.method.wrap_method( - self.list_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_inspect_template: gapic_v1.method.wrap_method( - self.create_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_inspect_template: gapic_v1.method.wrap_method( - 
self.update_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_inspect_template: gapic_v1.method.wrap_method( - self.get_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_inspect_templates: gapic_v1.method.wrap_method( - self.list_inspect_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_inspect_template: gapic_v1.method.wrap_method( - self.delete_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_deidentify_template: gapic_v1.method.wrap_method( - self.create_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_deidentify_template: gapic_v1.method.wrap_method( - self.update_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_deidentify_template: gapic_v1.method.wrap_method( - self.get_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_deidentify_templates: gapic_v1.method.wrap_method( - self.list_deidentify_templates, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_deidentify_template: gapic_v1.method.wrap_method( - self.delete_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_job_trigger: gapic_v1.method.wrap_method( - self.create_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.update_job_trigger: gapic_v1.method.wrap_method( - self.update_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( - self.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.get_job_trigger: gapic_v1.method.wrap_method( - self.get_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_job_triggers: gapic_v1.method.wrap_method( - self.list_job_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_job_trigger: gapic_v1.method.wrap_method( - self.delete_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - 
core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.activate_job_trigger: gapic_v1.method.wrap_method( - self.activate_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.create_dlp_job: gapic_v1.method.wrap_method( - self.create_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.list_dlp_jobs: gapic_v1.method.wrap_method( - self.list_dlp_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_dlp_job: gapic_v1.method.wrap_method( - self.get_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_dlp_job: gapic_v1.method.wrap_method( - self.delete_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.cancel_dlp_job: gapic_v1.method.wrap_method( - self.cancel_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.create_stored_info_type: gapic_v1.method.wrap_method( - self.create_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.update_stored_info_type: gapic_v1.method.wrap_method( - self.update_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.get_stored_info_type: gapic_v1.method.wrap_method( - self.get_stored_info_type, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_stored_info_types: gapic_v1.method.wrap_method( - self.list_stored_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_stored_info_type: gapic_v1.method.wrap_method( - self.delete_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( - self.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.finish_dlp_job: gapic_v1.method.wrap_method( - self.finish_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Union[ - dlp.InspectContentResponse, - Awaitable[dlp.InspectContentResponse] - ]]: - raise NotImplementedError() - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - Union[ - dlp.RedactImageResponse, - Awaitable[dlp.RedactImageResponse] - ]]: - raise NotImplementedError() - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - Union[ - dlp.DeidentifyContentResponse, - Awaitable[dlp.DeidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Union[ - dlp.ReidentifyContentResponse, - Awaitable[dlp.ReidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Union[ - dlp.ListInfoTypesResponse, - Awaitable[dlp.ListInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Union[ - dlp.ListInspectTemplatesResponse, - Awaitable[dlp.ListInspectTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Union[ - 
empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Union[ - dlp.ListDeidentifyTemplatesResponse, - Awaitable[dlp.ListDeidentifyTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def list_job_triggers(self) -> Callable[ - 
[dlp.ListJobTriggersRequest], - Union[ - dlp.ListJobTriggersResponse, - Awaitable[dlp.ListJobTriggersResponse] - ]]: - raise NotImplementedError() - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - Union[ - dlp.ListDlpJobsResponse, - Awaitable[dlp.ListDlpJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def 
list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Union[ - dlp.ListStoredInfoTypesResponse, - Awaitable[dlp.ListStoredInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DlpServiceTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py deleted file mode 100644 index d95be0ba..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py +++ /dev/null @@ -1,1262 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO - - -class DlpServiceGrpcTransport(DlpServiceTransport): - """gRPC backend transport for DlpService. - - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - dlp.InspectContentResponse]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. 
- For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - Returns: - Callable[[~.InspectContentRequest], - ~.InspectContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'inspect_content' not in self._stubs: - self._stubs['inspect_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/InspectContent', - request_serializer=dlp.InspectContentRequest.serialize, - response_deserializer=dlp.InspectContentResponse.deserialize, - ) - return self._stubs['inspect_content'] - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - dlp.RedactImageResponse]: - r"""Return a callable for the redact image method over gRPC. - - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.RedactImageRequest], - ~.RedactImageResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'redact_image' not in self._stubs: - self._stubs['redact_image'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/RedactImage', - request_serializer=dlp.RedactImageRequest.serialize, - response_deserializer=dlp.RedactImageResponse.deserialize, - ) - return self._stubs['redact_image'] - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - dlp.DeidentifyContentResponse]: - r"""Return a callable for the deidentify content method over gRPC. - - De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.DeidentifyContentRequest], - ~.DeidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - dlp.ReidentifyContentResponse]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. 
See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - ~.ReidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - dlp.ListInfoTypesResponse]: - r"""Return a callable for the list info types method over gRPC. - - Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - ~.ListInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.UpdateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - dlp.ListInspectTemplatesResponse]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.ListInspectTemplatesRequest], - ~.ListInspectTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.DeleteInspectTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. 
- - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - dlp.ListDeidentifyTemplatesResponse]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - ~.ListDeidentifyTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. 
- - Returns: - Callable[[~.CreateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.UpdateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. 
To review the findings monitor the jobs - within the trigger. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - dlp.ListJobTriggersResponse]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. 
- See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.ListJobTriggersRequest], - ~.ListJobTriggersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - dlp.DlpJob]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. 
Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - Returns: - Callable[[~.ActivateJobTriggerRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'activate_job_trigger' not in self._stubs: - self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', - request_serializer=dlp.ActivateJobTriggerRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['activate_job_trigger'] - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - dlp.DlpJob]: - r"""Return a callable for the create dlp job method over gRPC. - - Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.CreateDlpJobRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_dlp_job' not in self._stubs: - self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDlpJob', - request_serializer=dlp.CreateDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['create_dlp_job'] - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - dlp.ListDlpJobsResponse]: - r"""Return a callable for the list dlp jobs method over gRPC. - - Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.ListDlpJobsRequest], - ~.ListDlpJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_dlp_jobs' not in self._stubs: - self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDlpJobs', - request_serializer=dlp.ListDlpJobsRequest.serialize, - response_deserializer=dlp.ListDlpJobsResponse.deserialize, - ) - return self._stubs['list_dlp_jobs'] - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - dlp.DlpJob]: - r"""Return a callable for the get dlp job method over gRPC. - - Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.GetDlpJobRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete dlp job method over gRPC. - - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. 
- - Returns: - Callable[[~.CancelDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the update stored info type method over gRPC. - - Updates the stored infoType by creating a new - version. 
The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - dlp.ListStoredInfoTypesResponse]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.ListStoredInfoTypesRequest], - ~.ListStoredInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. - - Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - Returns: - Callable[[~.FinishDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DlpServiceGrpcTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py deleted file mode 100644 index 03c8bf3c..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1261 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DlpServiceGrpcTransport - - -class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): - """gRPC AsyncIO backend transport for DlpService. - - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Awaitable[dlp.InspectContentResponse]]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. 
- This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - Returns: - Callable[[~.InspectContentRequest], - Awaitable[~.InspectContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'inspect_content' not in self._stubs: - self._stubs['inspect_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/InspectContent', - request_serializer=dlp.InspectContentRequest.serialize, - response_deserializer=dlp.InspectContentResponse.deserialize, - ) - return self._stubs['inspect_content'] - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - Awaitable[dlp.RedactImageResponse]]: - r"""Return a callable for the redact image method over gRPC. - - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.RedactImageRequest], - Awaitable[~.RedactImageResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'redact_image' not in self._stubs: - self._stubs['redact_image'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/RedactImage', - request_serializer=dlp.RedactImageRequest.serialize, - response_deserializer=dlp.RedactImageResponse.deserialize, - ) - return self._stubs['redact_image'] - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - Awaitable[dlp.DeidentifyContentResponse]]: - r"""Return a callable for the deidentify content method over gRPC. - - De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.DeidentifyContentRequest], - Awaitable[~.DeidentifyContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Awaitable[dlp.ReidentifyContentResponse]]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - Awaitable[~.ReidentifyContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Awaitable[dlp.ListInfoTypesResponse]]: - r"""Return a callable for the list info types method over gRPC. - - Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - Awaitable[~.ListInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. 
- - Returns: - Callable[[~.UpdateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Awaitable[dlp.ListInspectTemplatesResponse]]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. 
- See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.ListInspectTemplatesRequest], - Awaitable[~.ListInspectTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.DeleteInspectTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. 
- - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Awaitable[dlp.ListDeidentifyTemplatesResponse]]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - Awaitable[~.ListDeidentifyTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.CreateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. 
- - Returns: - Callable[[~.UpdateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - Awaitable[dlp.ListJobTriggersResponse]]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.ListJobTriggersRequest], - Awaitable[~.ListJobTriggersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - Returns: - Callable[[~.ActivateJobTriggerRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'activate_job_trigger' not in self._stubs: - self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', - request_serializer=dlp.ActivateJobTriggerRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['activate_job_trigger'] - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the create dlp job method over gRPC. - - Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.CreateDlpJobRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_dlp_job' not in self._stubs: - self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDlpJob', - request_serializer=dlp.CreateDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['create_dlp_job'] - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - Awaitable[dlp.ListDlpJobsResponse]]: - r"""Return a callable for the list dlp jobs method over gRPC. - - Lists DlpJobs that match the specified filter in the - request. 
See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.ListDlpJobsRequest], - Awaitable[~.ListDlpJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_dlp_jobs' not in self._stubs: - self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDlpJobs', - request_serializer=dlp.ListDlpJobsRequest.serialize, - response_deserializer=dlp.ListDlpJobsResponse.deserialize, - ) - return self._stubs['list_dlp_jobs'] - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the get dlp job method over gRPC. - - Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.GetDlpJobRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete dlp job method over gRPC. 
- - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.CancelDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the update stored info type method over gRPC. - - Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Awaitable[dlp.ListStoredInfoTypesResponse]]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. 
- - Returns: - Callable[[~.ListStoredInfoTypesRequest], - Awaitable[~.ListStoredInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. 
- - Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - Returns: - Callable[[~.FinishDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'DlpServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py deleted file mode 100644 index 105716f4..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ /dev/null @@ -1,4325 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore - -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class DlpServiceRestInterceptor: - """Interceptor for DlpService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DlpServiceRestTransport. - - .. 
code-block:: python - class MyCustomDlpServiceInterceptor(DlpServiceRestInterceptor): - def pre_activate_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_activate_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_cancel_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_create_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_deidentify_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_dlp_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_inspect_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_stored_info_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_deidentify_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_deidentify_content(self, response): - logging.log(f"Received response: {response}") - return response - - def 
pre_delete_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_finish_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_deidentify_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_dlp_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_inspect_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_stored_info_type(self, response): - logging.log(f"Received response: {response}") - return 
response - - def pre_hybrid_inspect_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_hybrid_inspect_dlp_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_hybrid_inspect_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_hybrid_inspect_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_inspect_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_inspect_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_deidentify_templates(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_deidentify_templates(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_dlp_jobs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_dlp_jobs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_info_types(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_info_types(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_inspect_templates(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_inspect_templates(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_job_triggers(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_job_triggers(self, response): - logging.log(f"Received response: {response}") - return response - - def 
pre_list_stored_info_types(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_stored_info_types(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_redact_image(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_redact_image(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_reidentify_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_reidentify_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_deidentify_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_inspect_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_stored_info_type(self, response): - logging.log(f"Received response: {response}") - return response - - transport = DlpServiceRestTransport(interceptor=MyCustomDlpServiceInterceptor()) - client = DlpServiceClient(transport=transport) - - - """ - def pre_activate_job_trigger(self, request: dlp.ActivateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for activate_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: - """Post-rpc interceptor for activate_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_cancel_dlp_job(self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_create_deidentify_template(self, request: dlp.CreateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: - """Post-rpc interceptor for create_deidentify_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_create_dlp_job(self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: - """Post-rpc interceptor for create_dlp_job - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_create_inspect_template(self, request: dlp.CreateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: - """Post-rpc interceptor for create_inspect_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_create_job_trigger(self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: - """Post-rpc interceptor for create_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. 
- """ - return response - def pre_create_stored_info_type(self, request: dlp.CreateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: - """Post-rpc interceptor for create_stored_info_type - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_deidentify_content(self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for deidentify_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_deidentify_content(self, response: dlp.DeidentifyContentResponse) -> dlp.DeidentifyContentResponse: - """Post-rpc interceptor for deidentify_content - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_delete_deidentify_template(self, request: dlp.DeleteDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def pre_delete_dlp_job(self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_inspect_template(self, request: dlp.DeleteInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_job_trigger(self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_stored_info_type(self, request: dlp.DeleteStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_finish_dlp_job(self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for finish_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def pre_get_deidentify_template(self, request: dlp.GetDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: - """Post-rpc interceptor for get_deidentify_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_get_dlp_job(self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: - """Post-rpc interceptor for get_dlp_job - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_get_inspect_template(self, request: dlp.GetInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_get_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: - """Post-rpc interceptor for get_inspect_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_get_job_trigger(self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: - """Post-rpc interceptor for get_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_get_stored_info_type(self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: - """Post-rpc interceptor for get_stored_info_type - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. 
- """ - return response - def pre_hybrid_inspect_dlp_job(self, request: dlp.HybridInspectDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for hybrid_inspect_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_hybrid_inspect_dlp_job(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: - """Post-rpc interceptor for hybrid_inspect_dlp_job - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_hybrid_inspect_job_trigger(self, request: dlp.HybridInspectJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for hybrid_inspect_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_hybrid_inspect_job_trigger(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: - """Post-rpc interceptor for hybrid_inspect_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_inspect_content(self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for inspect_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_inspect_content(self, response: dlp.InspectContentResponse) -> dlp.InspectContentResponse: - """Post-rpc interceptor for inspect_content - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_deidentify_templates(self, request: dlp.ListDeidentifyTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_deidentify_templates - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_deidentify_templates(self, response: dlp.ListDeidentifyTemplatesResponse) -> dlp.ListDeidentifyTemplatesResponse: - """Post-rpc interceptor for list_deidentify_templates - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_dlp_jobs(self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_dlp_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_dlp_jobs(self, response: dlp.ListDlpJobsResponse) -> dlp.ListDlpJobsResponse: - """Post-rpc interceptor for list_dlp_jobs - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. 
- """ - return response - def pre_list_info_types(self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_info_types - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_info_types(self, response: dlp.ListInfoTypesResponse) -> dlp.ListInfoTypesResponse: - """Post-rpc interceptor for list_info_types - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_inspect_templates(self, request: dlp.ListInspectTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_inspect_templates - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_inspect_templates(self, response: dlp.ListInspectTemplatesResponse) -> dlp.ListInspectTemplatesResponse: - """Post-rpc interceptor for list_inspect_templates - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_job_triggers(self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_job_triggers - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_list_job_triggers(self, response: dlp.ListJobTriggersResponse) -> dlp.ListJobTriggersResponse: - """Post-rpc interceptor for list_job_triggers - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_stored_info_types(self, request: dlp.ListStoredInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_stored_info_types - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_stored_info_types(self, response: dlp.ListStoredInfoTypesResponse) -> dlp.ListStoredInfoTypesResponse: - """Post-rpc interceptor for list_stored_info_types - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_redact_image(self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for redact_image - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_redact_image(self, response: dlp.RedactImageResponse) -> dlp.RedactImageResponse: - """Post-rpc interceptor for redact_image - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. 
- """ - return response - def pre_reidentify_content(self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for reidentify_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_reidentify_content(self, response: dlp.ReidentifyContentResponse) -> dlp.ReidentifyContentResponse: - """Post-rpc interceptor for reidentify_content - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_update_deidentify_template(self, request: dlp.UpdateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: - """Post-rpc interceptor for update_deidentify_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_update_inspect_template(self, request: dlp.UpdateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_update_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: - """Post-rpc interceptor for update_inspect_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_update_job_trigger(self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: - """Post-rpc interceptor for update_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_update_stored_info_type(self, request: dlp.UpdateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: - """Post-rpc interceptor for update_stored_info_type - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class DlpServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DlpServiceRestInterceptor - - -class DlpServiceRestTransport(DlpServiceTransport): - """REST backend transport for DlpService. 
- - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DlpServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or DlpServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _ActivateJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("ActivateJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, 
message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ActivateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DlpJob: - r"""Call the activate job trigger method over HTTP. - - Args: - request (~.dlp.ActivateJobTriggerRequest): - The request object. Request message for - ActivateJobTrigger. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DlpJob: - Combines all of the information about - a DLP job. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/jobTriggers/*}:activate', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:activate', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_activate_job_trigger(request, metadata) - pb_request = dlp.ActivateJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = 
getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DlpJob() - pb_resp = dlp.DlpJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_activate_job_trigger(resp) - return resp - - class _CancelDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("CancelDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CancelDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the cancel dlp job method over HTTP. - - Args: - request (~.dlp.CancelDlpJobRequest): - The request object. The request message for canceling a - DLP job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/dlpJobs/*}:cancel', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_cancel_dlp_job(request, metadata) - pb_request = dlp.CancelDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _CreateDeidentifyTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("CreateDeidentifyTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyTemplate: - r"""Call the create deidentify - template method over HTTP. - - Args: - request (~.dlp.CreateDeidentifyTemplateRequest): - The request object. Request message for - CreateDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_deidentify_template(request, metadata) - pb_request = dlp.CreateDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyTemplate() - pb_resp = dlp.DeidentifyTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_deidentify_template(resp) - return resp - - class _CreateDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("CreateDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DlpJob: - r"""Call the create dlp job method over HTTP. - - Args: - request (~.dlp.CreateDlpJobRequest): - The request object. Request message for - CreateDlpJobRequest. Used to initiate - long running jobs such as calculating - risk metrics or inspecting Google Cloud - Storage. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DlpJob: - Combines all of the information about - a DLP job. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/dlpJobs', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_dlp_job(request, metadata) - pb_request = dlp.CreateDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DlpJob() - pb_resp = dlp.DlpJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_dlp_job(resp) - return resp - - class _CreateInspectTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("CreateInspectTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectTemplate: - r"""Call the create inspect template method over HTTP. - - Args: - request (~.dlp.CreateInspectTemplateRequest): - The request object. Request message for - CreateInspectTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/inspectTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/inspectTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_inspect_template(request, metadata) - pb_request = dlp.CreateInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectTemplate() - pb_resp = dlp.InspectTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_inspect_template(resp) - return resp - - class _CreateJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("CreateJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.JobTrigger: - r"""Call the create job trigger method over HTTP. - - Args: - request (~.dlp.CreateJobTriggerRequest): - The request object. Request message for CreateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/jobTriggers', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_job_trigger(request, metadata) - pb_request = dlp.CreateJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.JobTrigger() - pb_resp = dlp.JobTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_job_trigger(resp) - return resp - - class _CreateStoredInfoType(DlpServiceRestStub): - def __hash__(self): - return hash("CreateStoredInfoType") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.StoredInfoType: - r"""Call the create stored info type method over HTTP. - - Args: - request (~.dlp.CreateStoredInfoTypeRequest): - The request object. Request message for - CreateStoredInfoType. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/storedInfoTypes', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_stored_info_type(request, metadata) - pb_request = dlp.CreateStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.StoredInfoType() - pb_resp = dlp.StoredInfoType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_stored_info_type(resp) - return resp - - class _DeidentifyContent(DlpServiceRestStub): - def __hash__(self): - return hash("DeidentifyContent") - - def __call__(self, - request: dlp.DeidentifyContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyContentResponse: - r"""Call the deidentify content method over HTTP. - - Args: - request (~.dlp.DeidentifyContentRequest): - The request object. Request to de-identify a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:deidentify', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:deidentify', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_deidentify_content(request, metadata) - pb_request = dlp.DeidentifyContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyContentResponse() - pb_resp = dlp.DeidentifyContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_deidentify_content(resp) - return resp - - class _DeleteDeidentifyTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteDeidentifyTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete deidentify - template method over HTTP. - - Args: - request (~.dlp.DeleteDeidentifyTemplateRequest): - The request object. Request message for - DeleteDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_deidentify_template(request, metadata) - pb_request = dlp.DeleteDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete dlp job method over HTTP. - - Args: - request (~.dlp.DeleteDlpJobRequest): - The request object. The request message for deleting a - DLP job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/dlpJobs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_dlp_job(request, metadata) - pb_request = dlp.DeleteDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteInspectTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteInspectTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete inspect template method over HTTP. - - Args: - request (~.dlp.DeleteInspectTemplateRequest): - The request object. 
Request message for - DeleteInspectTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_inspect_template(request, metadata) - pb_request = dlp.DeleteInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete job trigger method over HTTP. - - Args: - request (~.dlp.DeleteJobTriggerRequest): - The request object. Request message for DeleteJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_job_trigger(request, metadata) - pb_request = dlp.DeleteJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteStoredInfoType(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteStoredInfoType") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete stored info type method over HTTP. - - Args: - request (~.dlp.DeleteStoredInfoTypeRequest): - The request object. Request message for - DeleteStoredInfoType. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_stored_info_type(request, metadata) - pb_request = dlp.DeleteStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _FinishDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("FinishDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.FinishDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the finish dlp job method over HTTP. - - Args: - request (~.dlp.FinishDlpJobRequest): - The request object. The request message for finishing a - DLP hybrid job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:finish', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_finish_dlp_job(request, metadata) - pb_request = dlp.FinishDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetDeidentifyTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("GetDeidentifyTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyTemplate: - r"""Call the get deidentify template method over HTTP. - - Args: - request (~.dlp.GetDeidentifyTemplateRequest): - The request object. Request message for - GetDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_get_deidentify_template(request, metadata) - pb_request = dlp.GetDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyTemplate() - pb_resp = dlp.DeidentifyTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_deidentify_template(resp) - return resp - - class _GetDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("GetDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DlpJob: - r"""Call the get dlp job method over HTTP. - - Args: - request (~.dlp.GetDlpJobRequest): - The request object. The request message for [DlpJobs.GetDlpJob][]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DlpJob: - Combines all of the information about - a DLP job. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/dlpJobs/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', - }, - ] - request, metadata = self._interceptor.pre_get_dlp_job(request, metadata) - pb_request = dlp.GetDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DlpJob() - pb_resp = dlp.DlpJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_dlp_job(resp) - return resp - - class _GetInspectTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("GetInspectTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectTemplate: - r"""Call the get inspect template method over HTTP. - - Args: - request (~.dlp.GetInspectTemplateRequest): - The request object. Request message for - GetInspectTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_get_inspect_template(request, metadata) - pb_request = dlp.GetInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectTemplate() - pb_resp = dlp.InspectTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_inspect_template(resp) - return resp - - class _GetJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("GetJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.JobTrigger: - r"""Call the get job trigger method over HTTP. - - Args: - request (~.dlp.GetJobTriggerRequest): - The request object. Request message for GetJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - }, - ] - request, metadata = self._interceptor.pre_get_job_trigger(request, metadata) - pb_request = dlp.GetJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.JobTrigger() - pb_resp = dlp.JobTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_job_trigger(resp) - return resp - - class _GetStoredInfoType(DlpServiceRestStub): - def __hash__(self): - return hash("GetStoredInfoType") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.StoredInfoType: - r"""Call the get stored info type method over HTTP. - - Args: - request (~.dlp.GetStoredInfoTypeRequest): - The request object. Request message for - GetStoredInfoType. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - }, - ] - request, metadata = self._interceptor.pre_get_stored_info_type(request, metadata) - pb_request = dlp.GetStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.StoredInfoType() - pb_resp = dlp.StoredInfoType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_stored_info_type(resp) - return resp - - class _HybridInspectDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("HybridInspectDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.HybridInspectDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.HybridInspectResponse: - r"""Call the hybrid inspect dlp job method over HTTP. - - Args: - request (~.dlp.HybridInspectDlpJobRequest): - The request object. Request to search for potentially - sensitive info in a custom location. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job(request, metadata) - pb_request = dlp.HybridInspectDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.HybridInspectResponse() - pb_resp = dlp.HybridInspectResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_hybrid_inspect_dlp_job(resp) - return resp - - class _HybridInspectJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("HybridInspectJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.HybridInspectJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.HybridInspectResponse: - r"""Call the hybrid inspect job - trigger method over HTTP. - - Args: - request (~.dlp.HybridInspectJobTriggerRequest): - The request object. Request to search for potentially - sensitive info in a custom location. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger(request, metadata) - pb_request = dlp.HybridInspectJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.HybridInspectResponse() - pb_resp = dlp.HybridInspectResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_hybrid_inspect_job_trigger(resp) - return resp - - class _InspectContent(DlpServiceRestStub): - def __hash__(self): - return hash("InspectContent") - - def __call__(self, - request: dlp.InspectContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectContentResponse: - r"""Call the inspect content method over HTTP. - - Args: - request (~.dlp.InspectContentRequest): - The request object. Request to search for potentially - sensitive info in a ContentItem. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.InspectContentResponse: - Results of inspecting an item. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:inspect', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:inspect', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_inspect_content(request, metadata) - pb_request = dlp.InspectContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectContentResponse() - pb_resp = dlp.InspectContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_inspect_content(resp) - return resp - - class _ListDeidentifyTemplates(DlpServiceRestStub): - def __hash__(self): - return hash("ListDeidentifyTemplates") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListDeidentifyTemplatesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListDeidentifyTemplatesResponse: - r"""Call the list deidentify templates method over HTTP. - - Args: - request (~.dlp.ListDeidentifyTemplatesRequest): - The request object. Request message for - ListDeidentifyTemplates. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListDeidentifyTemplatesResponse: - Response message for - ListDeidentifyTemplates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', - }, - ] - request, metadata = self._interceptor.pre_list_deidentify_templates(request, metadata) - pb_request = dlp.ListDeidentifyTemplatesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListDeidentifyTemplatesResponse() - pb_resp = dlp.ListDeidentifyTemplatesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_deidentify_templates(resp) - return resp - - class _ListDlpJobs(DlpServiceRestStub): - def __hash__(self): - return hash("ListDlpJobs") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListDlpJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListDlpJobsResponse: - r"""Call the list dlp jobs method over HTTP. - - Args: - request (~.dlp.ListDlpJobsRequest): - The request object. The request message for listing DLP - jobs. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListDlpJobsResponse: - The response message for listing DLP - jobs. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/dlpJobs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/dlpJobs', - }, - ] - request, metadata = self._interceptor.pre_list_dlp_jobs(request, metadata) - pb_request = dlp.ListDlpJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListDlpJobsResponse() - pb_resp = dlp.ListDlpJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_dlp_jobs(resp) - return resp - - class _ListInfoTypes(DlpServiceRestStub): - def __hash__(self): - return hash("ListInfoTypes") - - def __call__(self, - request: dlp.ListInfoTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListInfoTypesResponse: - r"""Call the list info types method over HTTP. - - Args: - request (~.dlp.ListInfoTypesRequest): - The request object. Request for the list of infoTypes. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListInfoTypesResponse: - Response to the ListInfoTypes - request. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/infoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=locations/*}/infoTypes', - }, - ] - request, metadata = self._interceptor.pre_list_info_types(request, metadata) - pb_request = dlp.ListInfoTypesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListInfoTypesResponse() - pb_resp = dlp.ListInfoTypesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_info_types(resp) - return resp - - class _ListInspectTemplates(DlpServiceRestStub): - def __hash__(self): - return hash("ListInspectTemplates") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListInspectTemplatesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListInspectTemplatesResponse: - r"""Call the list inspect templates method over HTTP. - - Args: - request (~.dlp.ListInspectTemplatesRequest): - The request object. Request message for - ListInspectTemplates. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListInspectTemplatesResponse: - Response message for - ListInspectTemplates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/inspectTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/inspectTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', - }, - ] - request, metadata = self._interceptor.pre_list_inspect_templates(request, metadata) - pb_request = dlp.ListInspectTemplatesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListInspectTemplatesResponse() - pb_resp = dlp.ListInspectTemplatesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_inspect_templates(resp) - return resp - - class _ListJobTriggers(DlpServiceRestStub): - def __hash__(self): - return hash("ListJobTriggers") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListJobTriggersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListJobTriggersResponse: - r"""Call the list job triggers method over HTTP. - - Args: - request (~.dlp.ListJobTriggersRequest): - The request object. Request message for ListJobTriggers. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListJobTriggersResponse: - Response message for ListJobTriggers. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/jobTriggers', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', - }, - ] - request, metadata = self._interceptor.pre_list_job_triggers(request, metadata) - pb_request = dlp.ListJobTriggersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListJobTriggersResponse() - pb_resp = dlp.ListJobTriggersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_job_triggers(resp) - return resp - - class _ListStoredInfoTypes(DlpServiceRestStub): - def __hash__(self): - return hash("ListStoredInfoTypes") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListStoredInfoTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListStoredInfoTypesResponse: - r"""Call the list stored info types method over HTTP. - - Args: - request (~.dlp.ListStoredInfoTypesRequest): - The request object. Request message for - ListStoredInfoTypes. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListStoredInfoTypesResponse: - Response message for - ListStoredInfoTypes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/storedInfoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', - }, - ] - request, metadata = self._interceptor.pre_list_stored_info_types(request, metadata) - pb_request = dlp.ListStoredInfoTypesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListStoredInfoTypesResponse() - pb_resp = dlp.ListStoredInfoTypesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_stored_info_types(resp) - return resp - - class _RedactImage(DlpServiceRestStub): - def __hash__(self): - return hash("RedactImage") - - def __call__(self, - request: dlp.RedactImageRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.RedactImageResponse: - r"""Call the redact image method over HTTP. - - Args: - request (~.dlp.RedactImageRequest): - The request object. Request to search for potentially - sensitive info in an image and redact it - by covering it with a colored rectangle. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.RedactImageResponse: - Results of redacting an image. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/image:redact', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/image:redact', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_redact_image(request, metadata) - pb_request = dlp.RedactImageRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.RedactImageResponse() - pb_resp = dlp.RedactImageResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_redact_image(resp) - return resp - - class _ReidentifyContent(DlpServiceRestStub): - def __hash__(self): - return hash("ReidentifyContent") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ReidentifyContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ReidentifyContentResponse: - r"""Call the reidentify content method over HTTP. - - Args: - request (~.dlp.ReidentifyContentRequest): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ReidentifyContentResponse: - Results of re-identifying an item. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:reidentify', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:reidentify', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_reidentify_content(request, metadata) - pb_request = dlp.ReidentifyContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ReidentifyContentResponse() - pb_resp = dlp.ReidentifyContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_reidentify_content(resp) - return resp - - class _UpdateDeidentifyTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("UpdateDeidentifyTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyTemplate: - r"""Call the update deidentify - template method over HTTP. - - Args: - request (~.dlp.UpdateDeidentifyTemplateRequest): - The request object. Request message for - UpdateDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_update_deidentify_template(request, metadata) - pb_request = dlp.UpdateDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyTemplate() - pb_resp = dlp.DeidentifyTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_deidentify_template(resp) - return resp - - class _UpdateInspectTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("UpdateInspectTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectTemplate: - r"""Call the update inspect template method over HTTP. - - Args: - request (~.dlp.UpdateInspectTemplateRequest): - The request object. Request message for - UpdateInspectTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_update_inspect_template(request, metadata) - pb_request = dlp.UpdateInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectTemplate() - pb_resp = dlp.InspectTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_inspect_template(resp) - return resp - - class _UpdateJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("UpdateJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.JobTrigger: - r"""Call the update job trigger method over HTTP. - - Args: - request (~.dlp.UpdateJobTriggerRequest): - The request object. Request message for UpdateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_update_job_trigger(request, metadata) - pb_request = dlp.UpdateJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.JobTrigger() - pb_resp = dlp.JobTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_job_trigger(resp) - return resp - - class _UpdateStoredInfoType(DlpServiceRestStub): - def __hash__(self): - return hash("UpdateStoredInfoType") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.StoredInfoType: - r"""Call the update stored info type method over HTTP. - - Args: - request (~.dlp.UpdateStoredInfoTypeRequest): - The request object. Request message for - UpdateStoredInfoType. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_update_stored_info_type(request, metadata) - pb_request = dlp.UpdateStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.StoredInfoType() - pb_resp = dlp.StoredInfoType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_stored_info_type(resp) - return resp - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - dlp.DlpJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - dlp.DlpJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - dlp.InspectTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - dlp.JobTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - dlp.StoredInfoType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - dlp.DeidentifyContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - dlp.DlpJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - dlp.InspectTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - dlp.JobTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - dlp.StoredInfoType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - dlp.HybridInspectResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - dlp.HybridInspectResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - dlp.InspectContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - dlp.ListDeidentifyTemplatesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - dlp.ListDlpJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - dlp.ListInfoTypesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - dlp.ListInspectTemplatesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - dlp.ListJobTriggersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - dlp.ListStoredInfoTypesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - dlp.RedactImageResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - dlp.ReidentifyContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - dlp.InspectTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - dlp.JobTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - dlp.StoredInfoType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DlpServiceRestTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py deleted file mode 100644 index 5bc3d949..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py +++ /dev/null @@ -1,390 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .dlp import ( - Action, - ActivateJobTriggerRequest, - AnalyzeDataSourceRiskDetails, - BoundingBox, - BucketingConfig, - ByteContentItem, - CancelDlpJobRequest, - CharacterMaskConfig, - CharsToIgnore, - Color, - Container, - ContentItem, - ContentLocation, - CreateDeidentifyTemplateRequest, - CreateDlpJobRequest, - CreateInspectTemplateRequest, - CreateJobTriggerRequest, - CreateStoredInfoTypeRequest, - CryptoDeterministicConfig, - CryptoHashConfig, - CryptoKey, - CryptoReplaceFfxFpeConfig, - DataProfileAction, - DataProfileConfigSnapshot, - DataProfileJobConfig, - DataProfileLocation, - DataProfilePubSubCondition, - DataProfilePubSubMessage, - DataRiskLevel, - DateShiftConfig, - DateTime, - DeidentifyConfig, - DeidentifyContentRequest, - DeidentifyContentResponse, - DeidentifyTemplate, - DeleteDeidentifyTemplateRequest, - DeleteDlpJobRequest, - DeleteInspectTemplateRequest, - DeleteJobTriggerRequest, - DeleteStoredInfoTypeRequest, - DlpJob, - DocumentLocation, - Error, - ExcludeByHotword, - ExcludeInfoTypes, - ExclusionRule, - FieldTransformation, - Finding, - FinishDlpJobRequest, - FixedSizeBucketingConfig, - GetDeidentifyTemplateRequest, - GetDlpJobRequest, - GetInspectTemplateRequest, - GetJobTriggerRequest, - GetStoredInfoTypeRequest, - HybridContentItem, - HybridFindingDetails, - HybridInspectDlpJobRequest, - HybridInspectJobTriggerRequest, - HybridInspectResponse, - HybridInspectStatistics, - ImageLocation, - ImageTransformations, - InfoTypeCategory, - InfoTypeDescription, - InfoTypeStats, - InfoTypeSummary, - InfoTypeTransformations, - InspectConfig, - InspectContentRequest, - InspectContentResponse, - InspectDataSourceDetails, - InspectionRule, - InspectionRuleSet, - InspectJobConfig, - InspectResult, - InspectTemplate, - JobTrigger, - KmsWrappedCryptoKey, - LargeCustomDictionaryConfig, - LargeCustomDictionaryStats, - ListDeidentifyTemplatesRequest, - ListDeidentifyTemplatesResponse, - ListDlpJobsRequest, - ListDlpJobsResponse, - 
ListInfoTypesRequest, - ListInfoTypesResponse, - ListInspectTemplatesRequest, - ListInspectTemplatesResponse, - ListJobTriggersRequest, - ListJobTriggersResponse, - ListStoredInfoTypesRequest, - ListStoredInfoTypesResponse, - Location, - Manual, - MetadataLocation, - OtherInfoTypeSummary, - OutputStorageConfig, - PrimitiveTransformation, - PrivacyMetric, - ProfileStatus, - QuasiId, - QuoteInfo, - Range, - RecordCondition, - RecordLocation, - RecordSuppression, - RecordTransformation, - RecordTransformations, - RedactConfig, - RedactImageRequest, - RedactImageResponse, - ReidentifyContentRequest, - ReidentifyContentResponse, - ReplaceDictionaryConfig, - ReplaceValueConfig, - ReplaceWithInfoTypeConfig, - RiskAnalysisJobConfig, - Schedule, - StatisticalTable, - StorageMetadataLabel, - StoredInfoType, - StoredInfoTypeConfig, - StoredInfoTypeStats, - StoredInfoTypeVersion, - Table, - TableDataProfile, - TableLocation, - TimePartConfig, - TransformationConfig, - TransformationDescription, - TransformationDetails, - TransformationDetailsStorageConfig, - TransformationErrorHandling, - TransformationLocation, - TransformationOverview, - TransformationResultStatus, - TransformationSummary, - TransientCryptoKey, - UnwrappedCryptoKey, - UpdateDeidentifyTemplateRequest, - UpdateInspectTemplateRequest, - UpdateJobTriggerRequest, - UpdateStoredInfoTypeRequest, - Value, - ValueFrequency, - VersionDescription, - ContentOption, - DlpJobType, - EncryptionStatus, - InfoTypeSupportedBy, - MatchingType, - MetadataType, - RelationalOperator, - ResourceVisibility, - StoredInfoTypeState, - TransformationContainerType, - TransformationResultStatusType, - TransformationType, -) -from .storage import ( - BigQueryField, - BigQueryKey, - BigQueryOptions, - BigQueryTable, - CloudStorageFileSet, - CloudStorageOptions, - CloudStoragePath, - CloudStorageRegexFileSet, - CustomInfoType, - DatastoreKey, - DatastoreOptions, - EntityId, - FieldId, - HybridOptions, - InfoType, - Key, - KindExpression, - 
PartitionId, - RecordKey, - SensitivityScore, - StorageConfig, - StoredType, - TableOptions, - FileType, - Likelihood, -) - -__all__ = ( - 'Action', - 'ActivateJobTriggerRequest', - 'AnalyzeDataSourceRiskDetails', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'Color', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateDeidentifyTemplateRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DataProfileAction', - 'DataProfileConfigSnapshot', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - 'DataRiskLevel', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyTemplate', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDlpJobRequest', - 'DeleteInspectTemplateRequest', - 'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeByHotword', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetDeidentifyTemplateRequest', - 'GetDlpJobRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetStoredInfoTypeRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'ImageTransformations', - 'InfoTypeCategory', - 'InfoTypeDescription', - 'InfoTypeStats', - 'InfoTypeSummary', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 
'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OtherInfoTypeSummary', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'ProfileStatus', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformation', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'ReplaceDictionaryConfig', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableDataProfile', - 'TableLocation', - 'TimePartConfig', - 'TransformationConfig', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationDetailsStorageConfig', - 'TransformationErrorHandling', - 'TransformationLocation', - 'TransformationOverview', - 'TransformationResultStatus', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateDeidentifyTemplateRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 'ValueFrequency', - 'VersionDescription', - 'ContentOption', - 'DlpJobType', - 'EncryptionStatus', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'RelationalOperator', - 'ResourceVisibility', - 'StoredInfoTypeState', 
- 'TransformationContainerType', - 'TransformationResultStatusType', - 'TransformationType', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'SensitivityScore', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py deleted file mode 100644 index d82444a2..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py +++ /dev/null @@ -1,8846 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dlp_v2.types import storage -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'TransformationResultStatusType', - 'TransformationContainerType', - 'TransformationType', - 'RelationalOperator', - 'MatchingType', - 'ContentOption', - 'MetadataType', - 'InfoTypeSupportedBy', - 'DlpJobType', - 'StoredInfoTypeState', - 'ResourceVisibility', - 'EncryptionStatus', - 'ExcludeInfoTypes', - 'ExcludeByHotword', - 'ExclusionRule', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectConfig', - 'ByteContentItem', - 'ContentItem', - 'Table', - 'InspectResult', - 'Finding', - 'Location', - 'ContentLocation', - 'MetadataLocation', - 'StorageMetadataLabel', - 'DocumentLocation', - 'RecordLocation', - 'TableLocation', - 'Container', - 'Range', - 'ImageLocation', - 'BoundingBox', - 'RedactImageRequest', - 'Color', - 'RedactImageResponse', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'InspectContentRequest', - 'InspectContentResponse', - 'OutputStorageConfig', - 'InfoTypeStats', - 'InspectDataSourceDetails', - 'HybridInspectStatistics', - 'InfoTypeDescription', - 'InfoTypeCategory', - 'VersionDescription', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'RiskAnalysisJobConfig', - 'QuasiId', - 'StatisticalTable', - 'PrivacyMetric', - 'AnalyzeDataSourceRiskDetails', - 'ValueFrequency', - 'Value', - 'QuoteInfo', - 'DateTime', - 
'DeidentifyConfig', - 'ImageTransformations', - 'TransformationErrorHandling', - 'PrimitiveTransformation', - 'TimePartConfig', - 'CryptoHashConfig', - 'CryptoDeterministicConfig', - 'ReplaceValueConfig', - 'ReplaceDictionaryConfig', - 'ReplaceWithInfoTypeConfig', - 'RedactConfig', - 'CharsToIgnore', - 'CharacterMaskConfig', - 'FixedSizeBucketingConfig', - 'BucketingConfig', - 'CryptoReplaceFfxFpeConfig', - 'CryptoKey', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'KmsWrappedCryptoKey', - 'DateShiftConfig', - 'InfoTypeTransformations', - 'FieldTransformation', - 'RecordTransformations', - 'RecordSuppression', - 'RecordCondition', - 'TransformationOverview', - 'TransformationSummary', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationLocation', - 'RecordTransformation', - 'TransformationResultStatus', - 'TransformationDetailsStorageConfig', - 'Schedule', - 'Manual', - 'InspectTemplate', - 'DeidentifyTemplate', - 'Error', - 'JobTrigger', - 'Action', - 'TransformationConfig', - 'CreateInspectTemplateRequest', - 'UpdateInspectTemplateRequest', - 'GetInspectTemplateRequest', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'DeleteInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'ActivateJobTriggerRequest', - 'UpdateJobTriggerRequest', - 'GetJobTriggerRequest', - 'CreateDlpJobRequest', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'DeleteJobTriggerRequest', - 'InspectJobConfig', - 'DataProfileAction', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DlpJob', - 'GetDlpJobRequest', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'CancelDlpJobRequest', - 'FinishDlpJobRequest', - 'DeleteDlpJobRequest', - 'CreateDeidentifyTemplateRequest', - 'UpdateDeidentifyTemplateRequest', - 'GetDeidentifyTemplateRequest', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'DeleteDeidentifyTemplateRequest', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 
'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'StoredInfoType', - 'CreateStoredInfoTypeRequest', - 'UpdateStoredInfoTypeRequest', - 'GetStoredInfoTypeRequest', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'DeleteStoredInfoTypeRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectDlpJobRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectResponse', - 'DataRiskLevel', - 'DataProfileConfigSnapshot', - 'TableDataProfile', - 'ProfileStatus', - 'InfoTypeSummary', - 'OtherInfoTypeSummary', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - }, -) - - -class TransformationResultStatusType(proto.Enum): - r"""Enum of possible outcomes of transformations. SUCCESS if - transformation and storing of transformation was successful, - otherwise, reason for not transforming. - - Values: - STATE_TYPE_UNSPECIFIED (0): - No description available. - INVALID_TRANSFORM (1): - This will be set when a finding could not be - transformed (i.e. outside user set bucket - range). - BIGQUERY_MAX_ROW_SIZE_EXCEEDED (2): - This will be set when a BigQuery - transformation was successful but could not be - stored back in BigQuery because the transformed - row exceeds BigQuery's max row size. - METADATA_UNRETRIEVABLE (3): - This will be set when there is a finding in - the custom metadata of a file, but at the write - time of the transformed file, this key / value - pair is unretrievable. - SUCCESS (4): - This will be set when the transformation and - storing of it is successful. - """ - STATE_TYPE_UNSPECIFIED = 0 - INVALID_TRANSFORM = 1 - BIGQUERY_MAX_ROW_SIZE_EXCEEDED = 2 - METADATA_UNRETRIEVABLE = 3 - SUCCESS = 4 - - -class TransformationContainerType(proto.Enum): - r"""Describes functionality of a given container in its original - format. - - Values: - TRANSFORM_UNKNOWN_CONTAINER (0): - No description available. - TRANSFORM_BODY (1): - No description available. 
- TRANSFORM_METADATA (2): - No description available. - TRANSFORM_TABLE (3): - No description available. - """ - TRANSFORM_UNKNOWN_CONTAINER = 0 - TRANSFORM_BODY = 1 - TRANSFORM_METADATA = 2 - TRANSFORM_TABLE = 3 - - -class TransformationType(proto.Enum): - r"""An enum of rules that can be used to transform a value. Can be a - record suppression, or one of the transformation rules specified - under ``PrimitiveTransformation``. - - Values: - TRANSFORMATION_TYPE_UNSPECIFIED (0): - Unused - RECORD_SUPPRESSION (1): - Record suppression - REPLACE_VALUE (2): - Replace value - REPLACE_DICTIONARY (15): - Replace value using a dictionary. - REDACT (3): - Redact - CHARACTER_MASK (4): - Character mask - CRYPTO_REPLACE_FFX_FPE (5): - FFX-FPE - FIXED_SIZE_BUCKETING (6): - Fixed size bucketing - BUCKETING (7): - Bucketing - REPLACE_WITH_INFO_TYPE (8): - Replace with info type - TIME_PART (9): - Time part - CRYPTO_HASH (10): - Crypto hash - DATE_SHIFT (12): - Date shift - CRYPTO_DETERMINISTIC_CONFIG (13): - Deterministic crypto - REDACT_IMAGE (14): - Redact image - """ - TRANSFORMATION_TYPE_UNSPECIFIED = 0 - RECORD_SUPPRESSION = 1 - REPLACE_VALUE = 2 - REPLACE_DICTIONARY = 15 - REDACT = 3 - CHARACTER_MASK = 4 - CRYPTO_REPLACE_FFX_FPE = 5 - FIXED_SIZE_BUCKETING = 6 - BUCKETING = 7 - REPLACE_WITH_INFO_TYPE = 8 - TIME_PART = 9 - CRYPTO_HASH = 10 - DATE_SHIFT = 12 - CRYPTO_DETERMINISTIC_CONFIG = 13 - REDACT_IMAGE = 14 - - -class RelationalOperator(proto.Enum): - r"""Operators available for comparing the value of fields. - - Values: - RELATIONAL_OPERATOR_UNSPECIFIED (0): - Unused - EQUAL_TO (1): - Equal. Attempts to match even with - incompatible types. - NOT_EQUAL_TO (2): - Not equal to. Attempts to match even with - incompatible types. - GREATER_THAN (3): - Greater than. - LESS_THAN (4): - Less than. - GREATER_THAN_OR_EQUALS (5): - Greater than or equals. - LESS_THAN_OR_EQUALS (6): - Less than or equals. 
- EXISTS (7): - Exists - """ - RELATIONAL_OPERATOR_UNSPECIFIED = 0 - EQUAL_TO = 1 - NOT_EQUAL_TO = 2 - GREATER_THAN = 3 - LESS_THAN = 4 - GREATER_THAN_OR_EQUALS = 5 - LESS_THAN_OR_EQUALS = 6 - EXISTS = 7 - - -class MatchingType(proto.Enum): - r"""Type of the match which can be applied to different ways of - matching, like Dictionary, regular expression and intersecting - with findings of another info type. - - Values: - MATCHING_TYPE_UNSPECIFIED (0): - Invalid. - MATCHING_TYPE_FULL_MATCH (1): - Full match. - - Dictionary: join of Dictionary results matched - complete finding quote - Regex: all regex - matches fill a finding quote start to end - - Exclude info type: completely inside affecting - info types findings - MATCHING_TYPE_PARTIAL_MATCH (2): - Partial match. - - Dictionary: at least one of the tokens in the - finding matches - Regex: substring of the - finding matches - - Exclude info type: intersects with affecting - info types findings - MATCHING_TYPE_INVERSE_MATCH (3): - Inverse match. - - Dictionary: no tokens in the finding match the - dictionary - Regex: finding doesn't match the - regex - - Exclude info type: no intersection with - affecting info types findings - """ - MATCHING_TYPE_UNSPECIFIED = 0 - MATCHING_TYPE_FULL_MATCH = 1 - MATCHING_TYPE_PARTIAL_MATCH = 2 - MATCHING_TYPE_INVERSE_MATCH = 3 - - -class ContentOption(proto.Enum): - r"""Deprecated and unused. - - Values: - CONTENT_UNSPECIFIED (0): - Includes entire content of a file or a data - stream. - CONTENT_TEXT (1): - Text content within the data, excluding any - metadata. - CONTENT_IMAGE (2): - Images found in the data. - """ - CONTENT_UNSPECIFIED = 0 - CONTENT_TEXT = 1 - CONTENT_IMAGE = 2 - - -class MetadataType(proto.Enum): - r"""Type of metadata containing the finding. - - Values: - METADATATYPE_UNSPECIFIED (0): - Unused - STORAGE_METADATA (2): - General file metadata provided by Cloud - Storage. 
- """ - METADATATYPE_UNSPECIFIED = 0 - STORAGE_METADATA = 2 - - -class InfoTypeSupportedBy(proto.Enum): - r"""Parts of the APIs which use certain infoTypes. - - Values: - ENUM_TYPE_UNSPECIFIED (0): - Unused. - INSPECT (1): - Supported by the inspect operations. - RISK_ANALYSIS (2): - Supported by the risk analysis operations. - """ - ENUM_TYPE_UNSPECIFIED = 0 - INSPECT = 1 - RISK_ANALYSIS = 2 - - -class DlpJobType(proto.Enum): - r"""An enum to represent the various types of DLP jobs. - - Values: - DLP_JOB_TYPE_UNSPECIFIED (0): - Defaults to INSPECT_JOB. - INSPECT_JOB (1): - The job inspected Google Cloud for sensitive - data. - RISK_ANALYSIS_JOB (2): - The job executed a Risk Analysis computation. - """ - DLP_JOB_TYPE_UNSPECIFIED = 0 - INSPECT_JOB = 1 - RISK_ANALYSIS_JOB = 2 - - -class StoredInfoTypeState(proto.Enum): - r"""State of a StoredInfoType version. - - Values: - STORED_INFO_TYPE_STATE_UNSPECIFIED (0): - Unused - PENDING (1): - StoredInfoType version is being created. - READY (2): - StoredInfoType version is ready for use. - FAILED (3): - StoredInfoType creation failed. All relevant error messages - are returned in the ``StoredInfoTypeVersion`` message. - INVALID (4): - StoredInfoType is no longer valid because artifacts stored - in user-controlled storage were modified. To fix an invalid - StoredInfoType, use the ``UpdateStoredInfoType`` method to - create a new version. - """ - STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 - PENDING = 1 - READY = 2 - FAILED = 3 - INVALID = 4 - - -class ResourceVisibility(proto.Enum): - r"""How broadly a resource has been shared. New items may be - added over time. A higher number means more restricted. - - Values: - RESOURCE_VISIBILITY_UNSPECIFIED (0): - Unused. - RESOURCE_VISIBILITY_PUBLIC (10): - Visible to any user. - RESOURCE_VISIBILITY_RESTRICTED (20): - Visible only to specific users. 
- """ - RESOURCE_VISIBILITY_UNSPECIFIED = 0 - RESOURCE_VISIBILITY_PUBLIC = 10 - RESOURCE_VISIBILITY_RESTRICTED = 20 - - -class EncryptionStatus(proto.Enum): - r"""How a resource is encrypted. - - Values: - ENCRYPTION_STATUS_UNSPECIFIED (0): - Unused. - ENCRYPTION_GOOGLE_MANAGED (1): - Google manages server-side encryption keys on - your behalf. - ENCRYPTION_CUSTOMER_MANAGED (2): - Customer provides the key. - """ - ENCRYPTION_STATUS_UNSPECIFIED = 0 - ENCRYPTION_GOOGLE_MANAGED = 1 - ENCRYPTION_CUSTOMER_MANAGED = 2 - - -class ExcludeInfoTypes(proto.Message): - r"""List of excluded infoTypes. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - InfoType list in ExclusionRule rule drops a finding when it - overlaps or contained within with a finding of an infoType - from this list. For example, for - ``InspectionRuleSet.info_types`` containing - "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` - with "EMAIL_ADDRESS" the phone number findings are dropped - if they overlap with EMAIL_ADDRESS finding. That leads to - "555-222-2222@example.org" to generate only a single - finding, namely email address. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - - -class ExcludeByHotword(proto.Message): - r"""The rule to exclude findings based on a hotword. For record - inspection of tables, column names are considered hotwords. An - example of this is to exclude a finding if a BigQuery column - matches a specific pattern. - - Attributes: - hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression pattern defining what - qualifies as a hotword. - proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): - Range of characters within which the entire - hotword must reside. The total length of the - window cannot exceed 1000 characters. 
The - windowBefore property in proximity should be set - to 1 if the hotword needs to be included in a - column header. - """ - - hotword_regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=1, - message=storage.CustomInfoType.Regex, - ) - proximity: storage.CustomInfoType.DetectionRule.Proximity = proto.Field( - proto.MESSAGE, - number=2, - message=storage.CustomInfoType.DetectionRule.Proximity, - ) - - -class ExclusionRule(proto.Message): - r"""The rule that specifies conditions when findings of infoTypes - specified in ``InspectionRuleSet`` are removed from results. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - Dictionary which defines the rule. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression which defines the rule. - - This field is a member of `oneof`_ ``type``. - exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes): - Set of infoTypes for which findings would - affect this rule. - - This field is a member of `oneof`_ ``type``. - exclude_by_hotword (google.cloud.dlp_v2.types.ExcludeByHotword): - Drop if the hotword rule is contained in the - proximate context. For tabular data, the context - includes the column name. - - This field is a member of `oneof`_ ``type``. - matching_type (google.cloud.dlp_v2.types.MatchingType): - How the rule is applied, see MatchingType - documentation for details. 
- """ - - dictionary: storage.CustomInfoType.Dictionary = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - exclude_info_types: 'ExcludeInfoTypes' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='ExcludeInfoTypes', - ) - exclude_by_hotword: 'ExcludeByHotword' = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message='ExcludeByHotword', - ) - matching_type: 'MatchingType' = proto.Field( - proto.ENUM, - number=4, - enum='MatchingType', - ) - - -class InspectionRule(proto.Message): - r"""A single inspection rule to be applied to infoTypes, specified in - ``InspectionRuleSet``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - - This field is a member of `oneof`_ ``type``. - exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): - Exclusion rule. - - This field is a member of `oneof`_ ``type``. - """ - - hotword_rule: storage.CustomInfoType.DetectionRule.HotwordRule = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.DetectionRule.HotwordRule, - ) - exclusion_rule: 'ExclusionRule' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='ExclusionRule', - ) - - -class InspectionRuleSet(proto.Message): - r"""Rule set for modifying a set of infoTypes to alter behavior - under certain circumstances, depending on the specific details - of the rules within the set. 
- - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - List of infoTypes this rule set is applied - to. - rules (MutableSequence[google.cloud.dlp_v2.types.InspectionRule]): - Set of rules to be applied to infoTypes. The - rules are applied in order. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - rules: MutableSequence['InspectionRule'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='InspectionRule', - ) - - -class InspectConfig(proto.Message): - r"""Configuration description of the scanning process. When used with - redactContent only info_types and min_likelihood are currently used. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - Restricts what info_types to look for. The values must - correspond to InfoType values returned by ListInfoTypes or - listed at - https://cloud.google.com/dlp/docs/infotypes-reference. - - When no InfoTypes or CustomInfoTypes are specified in a - request, the system may automatically choose what detectors - to run. By default this may be all types, but may change - over time as detectors are updated. - - If you need precise control and predictability as to what - detectors are run you should specify specific InfoTypes - listed in the reference, otherwise a default list will be - used, which may change over time. - min_likelihood (google.cloud.dlp_v2.types.Likelihood): - Only returns findings equal or above this - threshold. The default is POSSIBLE. - See https://cloud.google.com/dlp/docs/likelihood - to learn more. - limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): - Configuration to control the number of findings returned. - This is not used for data profiling. - - When redacting sensitive data from images, finding limits - don't apply. They can cause unexpected or inconsistent - results, where only some data is redacted. 
Don't include - finding limits in - [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] - requests. Otherwise, Cloud DLP returns an error. - include_quote (bool): - When true, a contextual quote from the data that triggered a - finding is included in the response; see - [Finding.quote][google.privacy.dlp.v2.Finding.quote]. This - is not used for data profiling. - exclude_info_types (bool): - When true, excludes type information of the - findings. This is not used for data profiling. - custom_info_types (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType]): - CustomInfoTypes provided by the user. See - https://cloud.google.com/dlp/docs/creating-custom-infotypes - to learn more. - content_options (MutableSequence[google.cloud.dlp_v2.types.ContentOption]): - Deprecated and unused. - rule_set (MutableSequence[google.cloud.dlp_v2.types.InspectionRuleSet]): - Set of rules to apply to the findings for - this InspectConfig. Exclusion rules, contained - in the set are executed in the end, other rules - are executed in the order they are specified for - each info type. - """ - - class FindingLimits(proto.Message): - r"""Configuration to control the number of findings returned for - inspection. This is not used for de-identification or data - profiling. - - When redacting sensitive data from images, finding limits don't - apply. They can cause unexpected or inconsistent results, where only - some data is redacted. Don't include finding limits in - [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] - requests. Otherwise, Cloud DLP returns an error. - - Attributes: - max_findings_per_item (int): - Max number of findings that will be returned for each item - scanned. When set within ``InspectJobConfig``, the maximum - returned is 2000 regardless if this is set higher. When set - within ``InspectContentRequest``, this field is ignored. - max_findings_per_request (int): - Max number of findings that will be returned per - request/job. 
When set within ``InspectContentRequest``, the - maximum returned is 2000 regardless if this is set higher. - max_findings_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): - Configuration of findings limit given for - specified infoTypes. - """ - - class InfoTypeLimit(proto.Message): - r"""Max findings configuration per infoType, per content item or - long running DlpJob. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Type of information the findings limit applies to. Only one - limit per info_type should be provided. If InfoTypeLimit - does not have an info_type, the DLP API applies the limit - against all info_types that are found but not specified in - another InfoTypeLimit. - max_findings (int): - Max findings limit for the given infoType. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - max_findings: int = proto.Field( - proto.INT32, - number=2, - ) - - max_findings_per_item: int = proto.Field( - proto.INT32, - number=1, - ) - max_findings_per_request: int = proto.Field( - proto.INT32, - number=2, - ) - max_findings_per_info_type: MutableSequence['InspectConfig.FindingLimits.InfoTypeLimit'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='InspectConfig.FindingLimits.InfoTypeLimit', - ) - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - min_likelihood: storage.Likelihood = proto.Field( - proto.ENUM, - number=2, - enum=storage.Likelihood, - ) - limits: FindingLimits = proto.Field( - proto.MESSAGE, - number=3, - message=FindingLimits, - ) - include_quote: bool = proto.Field( - proto.BOOL, - number=4, - ) - exclude_info_types: bool = proto.Field( - proto.BOOL, - number=5, - ) - custom_info_types: MutableSequence[storage.CustomInfoType] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=storage.CustomInfoType, - ) - 
content_options: MutableSequence['ContentOption'] = proto.RepeatedField( - proto.ENUM, - number=8, - enum='ContentOption', - ) - rule_set: MutableSequence['InspectionRuleSet'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='InspectionRuleSet', - ) - - -class ByteContentItem(proto.Message): - r"""Container for bytes to inspect or redact. - - Attributes: - type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): - The type of data stored in the bytes string. Default will be - TEXT_UTF8. - data (bytes): - Content data to inspect or redact. - """ - class BytesType(proto.Enum): - r"""The type of data being sent for inspection. To learn more, see - `Supported file - types `__. - - Values: - BYTES_TYPE_UNSPECIFIED (0): - Unused - IMAGE (6): - Any image type. - IMAGE_JPEG (1): - jpeg - IMAGE_BMP (2): - bmp - IMAGE_PNG (3): - png - IMAGE_SVG (4): - svg - TEXT_UTF8 (5): - plain text - WORD_DOCUMENT (7): - docx, docm, dotx, dotm - PDF (8): - pdf - POWERPOINT_DOCUMENT (9): - pptx, pptm, potx, potm, pot - EXCEL_DOCUMENT (10): - xlsx, xlsm, xltx, xltm - AVRO (11): - avro - CSV (12): - csv - TSV (13): - tsv - """ - BYTES_TYPE_UNSPECIFIED = 0 - IMAGE = 6 - IMAGE_JPEG = 1 - IMAGE_BMP = 2 - IMAGE_PNG = 3 - IMAGE_SVG = 4 - TEXT_UTF8 = 5 - WORD_DOCUMENT = 7 - PDF = 8 - POWERPOINT_DOCUMENT = 9 - EXCEL_DOCUMENT = 10 - AVRO = 11 - CSV = 12 - TSV = 13 - - type_: BytesType = proto.Field( - proto.ENUM, - number=1, - enum=BytesType, - ) - data: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class ContentItem(proto.Message): - r""" - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - value (str): - String data to inspect or redact. 
- - This field is a member of `oneof`_ ``data_item``. - table (google.cloud.dlp_v2.types.Table): - Structured content for inspection. See - https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table - to learn more. - - This field is a member of `oneof`_ ``data_item``. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - Content data to inspect or redact. Replaces ``type`` and - ``data``. - - This field is a member of `oneof`_ ``data_item``. - """ - - value: str = proto.Field( - proto.STRING, - number=3, - oneof='data_item', - ) - table: 'Table' = proto.Field( - proto.MESSAGE, - number=4, - oneof='data_item', - message='Table', - ) - byte_item: 'ByteContentItem' = proto.Field( - proto.MESSAGE, - number=5, - oneof='data_item', - message='ByteContentItem', - ) - - -class Table(proto.Message): - r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request - allowed. See - https://cloud.google.com/dlp/docs/inspecting-structured-text#inspecting_a_table - to learn more. - - Attributes: - headers (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Headers of the table. - rows (MutableSequence[google.cloud.dlp_v2.types.Table.Row]): - Rows of the table. - """ - - class Row(proto.Message): - r"""Values of the row. - - Attributes: - values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Individual cells. - """ - - values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - - headers: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - rows: MutableSequence[Row] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Row, - ) - - -class InspectResult(proto.Message): - r"""All the findings for a single scanned item. - - Attributes: - findings (MutableSequence[google.cloud.dlp_v2.types.Finding]): - List of findings for an item. 
- findings_truncated (bool): - If true, then this item might have more - findings than were returned, and the findings - returned are an arbitrary subset of all - findings. The findings list might be truncated - because the input items were too large, or - because the server reached the maximum amount of - resources allowed for a single API call. For - best results, divide the input into smaller - batches. - """ - - findings: MutableSequence['Finding'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Finding', - ) - findings_truncated: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class Finding(proto.Message): - r"""Represents a piece of potentially sensitive content. - - Attributes: - name (str): - Resource name in format - projects/{project}/locations/{location}/findings/{finding} - Populated only when viewing persisted findings. - quote (str): - The content that was found. Even if the content is not - textual, it may be converted to a textual representation - here. Provided if ``include_quote`` is true and the finding - is less than or equal to 4096 bytes long. If the finding - exceeds 4096 bytes in length, the quote may be omitted. - info_type (google.cloud.dlp_v2.types.InfoType): - The type of content that might have been found. Provided if - ``excluded_types`` is false. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Confidence of how likely it is that the ``info_type`` is - correct. - location (google.cloud.dlp_v2.types.Location): - Where the content was found. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when finding was detected. - quote_info (google.cloud.dlp_v2.types.QuoteInfo): - Contains data parsed from quotes. Only populated if - include_quote was set to true and a supported infoType was - requested. Currently supported infoTypes: DATE, - DATE_OF_BIRTH and TIME. - resource_name (str): - The job that stored the finding. - trigger_name (str): - Job trigger name, if applicable, for this - finding. 
- labels (MutableMapping[str, str]): - The labels associated with this ``Finding``. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - job_create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the job started that produced this - finding. - job_name (str): - The job that stored the finding. - finding_id (str): - The unique finding id. - """ - - name: str = proto.Field( - proto.STRING, - number=14, - ) - quote: str = proto.Field( - proto.STRING, - number=1, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - likelihood: storage.Likelihood = proto.Field( - proto.ENUM, - number=3, - enum=storage.Likelihood, - ) - location: 'Location' = proto.Field( - proto.MESSAGE, - number=4, - message='Location', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - quote_info: 'QuoteInfo' = proto.Field( - proto.MESSAGE, - number=7, - message='QuoteInfo', - ) - resource_name: str = proto.Field( - proto.STRING, - number=8, - ) - trigger_name: str = proto.Field( - proto.STRING, - number=9, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - job_create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - job_name: str = proto.Field( - proto.STRING, - number=13, - ) - finding_id: str = proto.Field( - proto.STRING, - number=15, - ) - - -class Location(proto.Message): - r"""Specifies the location of the finding. 
- - Attributes: - byte_range (google.cloud.dlp_v2.types.Range): - Zero-based byte offsets delimiting the - finding. These are relative to the finding's - containing element. Note that when the content - is not textual, this references the UTF-8 - encoded textual representation of the content. - Omitted if content is an image. - codepoint_range (google.cloud.dlp_v2.types.Range): - Unicode character offsets delimiting the - finding. These are relative to the finding's - containing element. Provided when the content is - text. - content_locations (MutableSequence[google.cloud.dlp_v2.types.ContentLocation]): - List of nested objects pointing to the - precise location of the finding within the file - or record. - container (google.cloud.dlp_v2.types.Container): - Information about the container where this - finding occurred, if available. - """ - - byte_range: 'Range' = proto.Field( - proto.MESSAGE, - number=1, - message='Range', - ) - codepoint_range: 'Range' = proto.Field( - proto.MESSAGE, - number=2, - message='Range', - ) - content_locations: MutableSequence['ContentLocation'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ContentLocation', - ) - container: 'Container' = proto.Field( - proto.MESSAGE, - number=8, - message='Container', - ) - - -class ContentLocation(proto.Message): - r"""Precise location of the finding within a document, record, - image, or metadata container. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - container_name (str): - Name of the container where the finding is located. The top - level name is the source file name or table name. 
Names of - some common storage containers are formatted as follows: - - - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` - - Cloud Storage files: ``gs://{bucket}/{path}`` - - Datastore namespace: {namespace} - - Nested names could be absent if the embedded object has no - string identifier (for example, an image contained within a - document). - record_location (google.cloud.dlp_v2.types.RecordLocation): - Location within a row or record of a database - table. - - This field is a member of `oneof`_ ``location``. - image_location (google.cloud.dlp_v2.types.ImageLocation): - Location within an image's pixels. - - This field is a member of `oneof`_ ``location``. - document_location (google.cloud.dlp_v2.types.DocumentLocation): - Location data for document files. - - This field is a member of `oneof`_ ``location``. - metadata_location (google.cloud.dlp_v2.types.MetadataLocation): - Location within the metadata for inspected - content. - - This field is a member of `oneof`_ ``location``. - container_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Finding container modification timestamp, if applicable. For - Cloud Storage, this field contains the last file - modification timestamp. For a BigQuery table, this field - contains the last_modified_time property. For Datastore, - this field isn't populated. - container_version (str): - Finding container version, if available - ("generation" for Cloud Storage). 
- """ - - container_name: str = proto.Field( - proto.STRING, - number=1, - ) - record_location: 'RecordLocation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location', - message='RecordLocation', - ) - image_location: 'ImageLocation' = proto.Field( - proto.MESSAGE, - number=3, - oneof='location', - message='ImageLocation', - ) - document_location: 'DocumentLocation' = proto.Field( - proto.MESSAGE, - number=5, - oneof='location', - message='DocumentLocation', - ) - metadata_location: 'MetadataLocation' = proto.Field( - proto.MESSAGE, - number=8, - oneof='location', - message='MetadataLocation', - ) - container_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - container_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class MetadataLocation(proto.Message): - r"""Metadata Location - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.dlp_v2.types.MetadataType): - Type of metadata containing the finding. - storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): - Storage metadata. - - This field is a member of `oneof`_ ``label``. - """ - - type_: 'MetadataType' = proto.Field( - proto.ENUM, - number=1, - enum='MetadataType', - ) - storage_label: 'StorageMetadataLabel' = proto.Field( - proto.MESSAGE, - number=3, - oneof='label', - message='StorageMetadataLabel', - ) - - -class StorageMetadataLabel(proto.Message): - r"""Storage metadata label to indicate which metadata entry - contains findings. - - Attributes: - key (str): - - """ - - key: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DocumentLocation(proto.Message): - r"""Location of a finding within a document. - - Attributes: - file_offset (int): - Offset of the line, from the beginning of the - file, where the finding is located. 
- """ - - file_offset: int = proto.Field( - proto.INT64, - number=1, - ) - - -class RecordLocation(proto.Message): - r"""Location of a finding within a row or record. - - Attributes: - record_key (google.cloud.dlp_v2.types.RecordKey): - Key of the finding. - field_id (google.cloud.dlp_v2.types.FieldId): - Field id of the field containing the finding. - table_location (google.cloud.dlp_v2.types.TableLocation): - Location within a ``ContentItem.Table``. - """ - - record_key: storage.RecordKey = proto.Field( - proto.MESSAGE, - number=1, - message=storage.RecordKey, - ) - field_id: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - table_location: 'TableLocation' = proto.Field( - proto.MESSAGE, - number=3, - message='TableLocation', - ) - - -class TableLocation(proto.Message): - r"""Location of a finding within a table. - - Attributes: - row_index (int): - The zero-based index of the row where the finding is - located. Only populated for resources that have a natural - ordering, not BigQuery. In BigQuery, to identify the row a - finding came from, populate - BigQueryOptions.identifying_fields with your primary key - column names and when you store the findings the value of - those columns will be stored inside of Finding. - """ - - row_index: int = proto.Field( - proto.INT64, - number=1, - ) - - -class Container(proto.Message): - r"""Represents a container that may contain DLP findings. - Examples of a container include a file, table, or database - record. - - Attributes: - type_ (str): - Container type, for example BigQuery or Cloud - Storage. - project_id (str): - Project where the finding was found. - Can be different from the project that owns the - finding. - full_path (str): - A string representation of the full container - name. Examples: - - BigQuery: 'Project:DataSetId.TableId' - - Cloud Storage: - 'gs://Bucket/folders/filename.txt' - root_path (str): - The root of the container. 
Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the root is ``dataset_id`` - - For Cloud Storage file - ``gs://bucket/folder/filename.txt``, the root is - ``gs://bucket`` - relative_path (str): - The rest of the path after the root. Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the relative path is ``table_id`` - - For Cloud Storage file - ``gs://bucket/folder/filename.txt``, the relative path is - ``folder/filename.txt`` - update_time (google.protobuf.timestamp_pb2.Timestamp): - Findings container modification timestamp, if applicable. - For Cloud Storage, this field contains the last file - modification timestamp. For a BigQuery table, this field - contains the last_modified_time property. For Datastore, - this field isn't populated. - version (str): - Findings container version, if available - ("generation" for Cloud Storage). - """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - full_path: str = proto.Field( - proto.STRING, - number=3, - ) - root_path: str = proto.Field( - proto.STRING, - number=4, - ) - relative_path: str = proto.Field( - proto.STRING, - number=5, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class Range(proto.Message): - r"""Generic half-open interval [start, end) - - Attributes: - start (int): - Index of the first character of the range - (inclusive). - end (int): - Index of the last character of the range - (exclusive). - """ - - start: int = proto.Field( - proto.INT64, - number=1, - ) - end: int = proto.Field( - proto.INT64, - number=2, - ) - - -class ImageLocation(proto.Message): - r"""Location of the finding within an image. 
- - Attributes: - bounding_boxes (MutableSequence[google.cloud.dlp_v2.types.BoundingBox]): - Bounding boxes locating the pixels within the - image containing the finding. - """ - - bounding_boxes: MutableSequence['BoundingBox'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BoundingBox', - ) - - -class BoundingBox(proto.Message): - r"""Bounding box encompassing detected text within an image. - - Attributes: - top (int): - Top coordinate of the bounding box. (0,0) is - upper left. - left (int): - Left coordinate of the bounding box. (0,0) is - upper left. - width (int): - Width of the bounding box in pixels. - height (int): - Height of the bounding box in pixels. - """ - - top: int = proto.Field( - proto.INT32, - number=1, - ) - left: int = proto.Field( - proto.INT32, - number=2, - ) - width: int = proto.Field( - proto.INT32, - number=3, - ) - height: int = proto.Field( - proto.INT32, - number=4, - ) - - -class RedactImageRequest(proto.Message): - r"""Request to search for potentially sensitive info in an image - and redact it by covering it with a colored rectangle. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - location_id (str): - Deprecated. This field has no effect. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. 
- image_redaction_configs (MutableSequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): - The configuration for specifying what content - to redact from images. - include_findings (bool): - Whether the response should include findings - along with the redacted image. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - The content must be PNG, JPEG, SVG or BMP. - """ - - class ImageRedactionConfig(proto.Message): - r"""Configuration for determining how redaction of images should - occur. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Only one per info_type should be provided per request. If - not specified, and redact_all_text is false, the DLP API - will redact all text that it matches against all info_types - that are found, but not specified in another - ImageRedactionConfig. - - This field is a member of `oneof`_ ``target``. - redact_all_text (bool): - If true, all text found in the image, regardless whether it - matches an info_type, is redacted. Only one should be - provided. - - This field is a member of `oneof`_ ``target``. - redaction_color (google.cloud.dlp_v2.types.Color): - The color to use when redacting content from - an image. If not specified, the default is - black. 
- """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - oneof='target', - message=storage.InfoType, - ) - redact_all_text: bool = proto.Field( - proto.BOOL, - number=2, - oneof='target', - ) - redaction_color: 'Color' = proto.Field( - proto.MESSAGE, - number=3, - message='Color', - ) - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - location_id: str = proto.Field( - proto.STRING, - number=8, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - image_redaction_configs: MutableSequence[ImageRedactionConfig] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=ImageRedactionConfig, - ) - include_findings: bool = proto.Field( - proto.BOOL, - number=6, - ) - byte_item: 'ByteContentItem' = proto.Field( - proto.MESSAGE, - number=7, - message='ByteContentItem', - ) - - -class Color(proto.Message): - r"""Represents a color in the RGB color space. - - Attributes: - red (float): - The amount of red in the color as a value in the interval - [0, 1]. - green (float): - The amount of green in the color as a value in the interval - [0, 1]. - blue (float): - The amount of blue in the color as a value in the interval - [0, 1]. - """ - - red: float = proto.Field( - proto.FLOAT, - number=1, - ) - green: float = proto.Field( - proto.FLOAT, - number=2, - ) - blue: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class RedactImageResponse(proto.Message): - r"""Results of redacting an image. - - Attributes: - redacted_image (bytes): - The redacted image. The type will be the same - as the original image. - extracted_text (str): - If an image was being inspected and the InspectConfig's - include_quote was set to true, then this field will include - all text, if any, that was found in the image. - inspect_result (google.cloud.dlp_v2.types.InspectResult): - The findings. Populated when include_findings in the request - is true. 
- """ - - redacted_image: bytes = proto.Field( - proto.BYTES, - number=1, - ) - extracted_text: str = proto.Field( - proto.STRING, - number=2, - ) - inspect_result: 'InspectResult' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectResult', - ) - - -class DeidentifyContentRequest(proto.Message): - r"""Request to de-identify a ContentItem. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the de-identification of the content item. - Items specified here will override the template referenced - by the deidentify_template_name argument. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. Items specified here will - override the template referenced by the - inspect_template_name argument. - item (google.cloud.dlp_v2.types.ContentItem): - The item to de-identify. Will be treated as text. - - This value must be of type - [Table][google.privacy.dlp.v2.Table] if your - [deidentify_config][google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config] - is a - [RecordTransformations][google.privacy.dlp.v2.RecordTransformations] - object. - inspect_template_name (str): - Template to use. Any configuration directly specified in - inspect_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. 
Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - deidentify_template_name (str): - Template to use. Any configuration directly specified in - deidentify_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyConfig', - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=4, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=5, - ) - deidentify_template_name: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class DeidentifyContentResponse(proto.Message): - r"""Results of de-identifying a ContentItem. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The de-identified item. - overview (google.cloud.dlp_v2.types.TransformationOverview): - An overview of the changes that were made on the ``item``. - """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - overview: 'TransformationOverview' = proto.Field( - proto.MESSAGE, - number=2, - message='TransformationOverview', - ) - - -class ReidentifyContentRequest(proto.Message): - r"""Request to re-identify an item. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the re-identification of the content item. - This field shares the same proto message type that is used - for de-identification, however its usage here is for the - reversal of the previous de-identification. - Re-identification is performed by examining the - transformations used to de-identify the items and executing - the reverse. This requires that only reversible - transformations be provided here. The reversible - transformations are: - - - ``CryptoDeterministicConfig`` - - ``CryptoReplaceFfxFpeConfig`` - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. - item (google.cloud.dlp_v2.types.ContentItem): - The item to re-identify. Will be treated as - text. - inspect_template_name (str): - Template to use. Any configuration directly specified in - ``inspect_config`` will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - reidentify_template_name (str): - Template to use. References an instance of - ``DeidentifyTemplate``. Any configuration directly specified - in ``reidentify_config`` or ``inspect_config`` will override - those set in the template. 
The ``DeidentifyTemplate`` used - must include only reversible transformations. Singular - fields that are set in this request will replace their - corresponding fields in the template. Repeated fields are - appended. Singular sub-messages and groups are recursively - merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - reidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyConfig', - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=4, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=5, - ) - reidentify_template_name: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ReidentifyContentResponse(proto.Message): - r"""Results of re-identifying an item. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The re-identified item. - overview (google.cloud.dlp_v2.types.TransformationOverview): - An overview of the changes that were made to the ``item``. - """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - overview: 'TransformationOverview' = proto.Field( - proto.MESSAGE, - number=2, - message='TransformationOverview', - ) - - -class InspectContentRequest(proto.Message): - r"""Request to search for potentially sensitive info in a - ContentItem. - - Attributes: - parent (str): - Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. What specified here will - override the template referenced by the - inspect_template_name argument. - item (google.cloud.dlp_v2.types.ContentItem): - The item to inspect. - inspect_template_name (str): - Template to use. Any configuration directly specified in - inspect_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class InspectContentResponse(proto.Message): - r"""Results of inspecting an item. - - Attributes: - result (google.cloud.dlp_v2.types.InspectResult): - The findings. 
- """ - - result: 'InspectResult' = proto.Field( - proto.MESSAGE, - number=1, - message='InspectResult', - ) - - -class OutputStorageConfig(proto.Message): - r"""Cloud repository for storing output. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Store findings in an existing table or a new table in an - existing dataset. If table_id is not set a new one will be - generated for you with the following format: - dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific time zone - will be used for generating the date details. - - For Inspect, each column in an existing output table must - have the same name, type, and mode of a field in the - ``Finding`` object. - - For Risk, an existing output table should be the output of a - previous Risk analysis job run on the same source table, - with the same privacy metric and quasi-identifiers. Risk - jobs that analyze the same table but compute a different - privacy metric, or use different sets of quasi-identifiers, - cannot store their results in the same table. - - This field is a member of `oneof`_ ``type``. - output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): - Schema used for writing the findings for Inspect jobs. This - field is only used for Inspect and must be unspecified for - Risk jobs. Columns are derived from the ``Finding`` object. - If appending to an existing table, any columns from the - predefined schema that are missing will be added. No columns - in the existing table will be deleted. - - If unspecified, then all available columns will be used for - a new table or an (existing) table with no schema, and no - changes will be made to an existing table that has a schema. - Only for use with external storage. - """ - class OutputSchema(proto.Enum): - r"""Predefined schemas for storing findings. - Only for use with external storage. 
- - Values: - OUTPUT_SCHEMA_UNSPECIFIED (0): - Unused. - BASIC_COLUMNS (1): - Basic schema including only ``info_type``, ``quote``, - ``certainty``, and ``timestamp``. - GCS_COLUMNS (2): - Schema tailored to findings from scanning - Cloud Storage. - DATASTORE_COLUMNS (3): - Schema tailored to findings from scanning - Google Datastore. - BIG_QUERY_COLUMNS (4): - Schema tailored to findings from scanning - Google BigQuery. - ALL_COLUMNS (5): - Schema containing all columns. - """ - OUTPUT_SCHEMA_UNSPECIFIED = 0 - BASIC_COLUMNS = 1 - GCS_COLUMNS = 2 - DATASTORE_COLUMNS = 3 - BIG_QUERY_COLUMNS = 4 - ALL_COLUMNS = 5 - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.BigQueryTable, - ) - output_schema: OutputSchema = proto.Field( - proto.ENUM, - number=3, - enum=OutputSchema, - ) - - -class InfoTypeStats(proto.Message): - r"""Statistics regarding a specific InfoType. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The type of finding this stat is for. - count (int): - Number of findings for this infoType. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - -class InspectDataSourceDetails(proto.Message): - r"""The results of an inspect DataSource job. - - Attributes: - requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): - The configuration used for this job. - result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): - A summary of the outcome of this inspection - job. - """ - - class RequestedOptions(proto.Message): - r"""Snapshot of the inspection configuration. - - Attributes: - snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - If run with an InspectTemplate, a snapshot of - its state at the time of this run. - job_config (google.cloud.dlp_v2.types.InspectJobConfig): - Inspect config. 
- """ - - snapshot_inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - job_config: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectJobConfig', - ) - - class Result(proto.Message): - r"""All result fields mentioned below are updated while the job - is processing. - - Attributes: - processed_bytes (int): - Total size in bytes that were processed. - total_estimated_bytes (int): - Estimate of the number of bytes to process. - info_type_stats (MutableSequence[google.cloud.dlp_v2.types.InfoTypeStats]): - Statistics of how many instances of each info - type were found during inspect job. - hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): - Statistics related to the processing of - hybrid inspect. - """ - - processed_bytes: int = proto.Field( - proto.INT64, - number=1, - ) - total_estimated_bytes: int = proto.Field( - proto.INT64, - number=2, - ) - info_type_stats: MutableSequence['InfoTypeStats'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='InfoTypeStats', - ) - hybrid_stats: 'HybridInspectStatistics' = proto.Field( - proto.MESSAGE, - number=7, - message='HybridInspectStatistics', - ) - - requested_options: RequestedOptions = proto.Field( - proto.MESSAGE, - number=2, - message=RequestedOptions, - ) - result: Result = proto.Field( - proto.MESSAGE, - number=3, - message=Result, - ) - - -class HybridInspectStatistics(proto.Message): - r"""Statistics related to processing hybrid inspect requests. - - Attributes: - processed_count (int): - The number of hybrid inspection requests - processed within this job. - aborted_count (int): - The number of hybrid inspection requests - aborted because the job ran out of quota or was - ended before they could be processed. - pending_count (int): - The number of hybrid requests currently being processed. - Only populated when called via method ``getDlpJob``. 
A burst - of traffic may cause hybrid inspect requests to be enqueued. - Processing will take place as quickly as possible, but - resource limitations may impact how long a request is - enqueued for. - """ - - processed_count: int = proto.Field( - proto.INT64, - number=1, - ) - aborted_count: int = proto.Field( - proto.INT64, - number=2, - ) - pending_count: int = proto.Field( - proto.INT64, - number=3, - ) - - -class InfoTypeDescription(proto.Message): - r"""InfoType description. - - Attributes: - name (str): - Internal name of the infoType. - display_name (str): - Human readable form of the infoType name. - supported_by (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): - Which parts of the API supports this - InfoType. - description (str): - Description of the infotype. Translated when - language is provided in the request. - versions (MutableSequence[google.cloud.dlp_v2.types.VersionDescription]): - A list of available versions for the - infotype. - categories (MutableSequence[google.cloud.dlp_v2.types.InfoTypeCategory]): - The category of the infoType. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - supported_by: MutableSequence['InfoTypeSupportedBy'] = proto.RepeatedField( - proto.ENUM, - number=3, - enum='InfoTypeSupportedBy', - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - versions: MutableSequence['VersionDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='VersionDescription', - ) - categories: MutableSequence['InfoTypeCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='InfoTypeCategory', - ) - - -class InfoTypeCategory(proto.Message): - r"""Classification of infoTypes to organize them according to - geographic location, industry, and data type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - location_category (google.cloud.dlp_v2.types.InfoTypeCategory.LocationCategory): - The region or country that issued the ID or - document represented by the infoType. - - This field is a member of `oneof`_ ``category``. - industry_category (google.cloud.dlp_v2.types.InfoTypeCategory.IndustryCategory): - The group of relevant businesses where this - infoType is commonly used - - This field is a member of `oneof`_ ``category``. - type_category (google.cloud.dlp_v2.types.InfoTypeCategory.TypeCategory): - The class of identifiers where this infoType - belongs - - This field is a member of `oneof`_ ``category``. - """ - class LocationCategory(proto.Enum): - r"""Enum of the current locations. - We might add more locations in the future. - - Values: - LOCATION_UNSPECIFIED (0): - Unused location - GLOBAL (1): - The infoType is not issued by or tied to a - specific region, but is used almost everywhere. - ARGENTINA (2): - The infoType is typically used in Argentina. - AUSTRALIA (3): - The infoType is typically used in Australia. - BELGIUM (4): - The infoType is typically used in Belgium. - BRAZIL (5): - The infoType is typically used in Brazil. - CANADA (6): - The infoType is typically used in Canada. - CHILE (7): - The infoType is typically used in Chile. - CHINA (8): - The infoType is typically used in China. - COLOMBIA (9): - The infoType is typically used in Colombia. - DENMARK (10): - The infoType is typically used in Denmark. - FRANCE (11): - The infoType is typically used in France. - FINLAND (12): - The infoType is typically used in Finland. - GERMANY (13): - The infoType is typically used in Germany. - HONG_KONG (14): - The infoType is typically used in Hong Kong. - INDIA (15): - The infoType is typically used in India. 
- INDONESIA (16): - The infoType is typically used in Indonesia. - IRELAND (17): - The infoType is typically used in Ireland. - ISRAEL (18): - The infoType is typically used in Israel. - ITALY (19): - The infoType is typically used in Italy. - JAPAN (20): - The infoType is typically used in Japan. - KOREA (21): - The infoType is typically used in Korea. - MEXICO (22): - The infoType is typically used in Mexico. - THE_NETHERLANDS (23): - The infoType is typically used in the - Netherlands. - NORWAY (24): - The infoType is typically used in Norway. - PARAGUAY (25): - The infoType is typically used in Paraguay. - PERU (26): - The infoType is typically used in Peru. - POLAND (27): - The infoType is typically used in Poland. - PORTUGAL (28): - The infoType is typically used in Portugal. - SINGAPORE (29): - The infoType is typically used in Singapore. - SOUTH_AFRICA (30): - The infoType is typically used in South - Africa. - SPAIN (31): - The infoType is typically used in Spain. - SWEDEN (32): - The infoType is typically used in Sweden. - TAIWAN (33): - The infoType is typically used in Taiwan. - THAILAND (34): - The infoType is typically used in Thailand. - TURKEY (35): - The infoType is typically used in Turkey. - UNITED_KINGDOM (36): - The infoType is typically used in the United - Kingdom. - UNITED_STATES (37): - The infoType is typically used in the United - States. - URUGUAY (38): - The infoType is typically used in Uruguay. - VENEZUELA (39): - The infoType is typically used in Venezuela. - INTERNAL (40): - The infoType is typically used in Google - internally. - NEW_ZEALAND (41): - The infoType is typically used in New - Zealand. 
- """ - LOCATION_UNSPECIFIED = 0 - GLOBAL = 1 - ARGENTINA = 2 - AUSTRALIA = 3 - BELGIUM = 4 - BRAZIL = 5 - CANADA = 6 - CHILE = 7 - CHINA = 8 - COLOMBIA = 9 - DENMARK = 10 - FRANCE = 11 - FINLAND = 12 - GERMANY = 13 - HONG_KONG = 14 - INDIA = 15 - INDONESIA = 16 - IRELAND = 17 - ISRAEL = 18 - ITALY = 19 - JAPAN = 20 - KOREA = 21 - MEXICO = 22 - THE_NETHERLANDS = 23 - NORWAY = 24 - PARAGUAY = 25 - PERU = 26 - POLAND = 27 - PORTUGAL = 28 - SINGAPORE = 29 - SOUTH_AFRICA = 30 - SPAIN = 31 - SWEDEN = 32 - TAIWAN = 33 - THAILAND = 34 - TURKEY = 35 - UNITED_KINGDOM = 36 - UNITED_STATES = 37 - URUGUAY = 38 - VENEZUELA = 39 - INTERNAL = 40 - NEW_ZEALAND = 41 - - class IndustryCategory(proto.Enum): - r"""Enum of the current industries in the category. - We might add more industries in the future. - - Values: - INDUSTRY_UNSPECIFIED (0): - Unused industry - FINANCE (1): - The infoType is typically used in the finance - industry. - HEALTH (2): - The infoType is typically used in the health - industry. - TELECOMMUNICATIONS (3): - The infoType is typically used in the - telecommunications industry. - """ - INDUSTRY_UNSPECIFIED = 0 - FINANCE = 1 - HEALTH = 2 - TELECOMMUNICATIONS = 3 - - class TypeCategory(proto.Enum): - r"""Enum of the current types in the category. - We might add more types in the future. - - Values: - TYPE_UNSPECIFIED (0): - Unused type - PII (1): - Personally identifiable information, for - example, a name or phone number - SPII (2): - Personally identifiable information that is - especially sensitive, for example, a passport - number. - DEMOGRAPHIC (3): - Attributes that can partially identify - someone, especially in combination with other - attributes, like age, height, and gender. - CREDENTIAL (4): - Confidential or secret information, for - example, a password. - GOVERNMENT_ID (5): - An identification document issued by a - government. - DOCUMENT (6): - A document, for example, a resume or source - code. 
- CONTEXTUAL_INFORMATION (7): - Information that is not sensitive on its own, - but provides details about the circumstances - surrounding an entity or an event. - """ - TYPE_UNSPECIFIED = 0 - PII = 1 - SPII = 2 - DEMOGRAPHIC = 3 - CREDENTIAL = 4 - GOVERNMENT_ID = 5 - DOCUMENT = 6 - CONTEXTUAL_INFORMATION = 7 - - location_category: LocationCategory = proto.Field( - proto.ENUM, - number=1, - oneof='category', - enum=LocationCategory, - ) - industry_category: IndustryCategory = proto.Field( - proto.ENUM, - number=2, - oneof='category', - enum=IndustryCategory, - ) - type_category: TypeCategory = proto.Field( - proto.ENUM, - number=3, - oneof='category', - enum=TypeCategory, - ) - - -class VersionDescription(proto.Message): - r"""Details about each available version for an infotype. - - Attributes: - version (str): - Name of the version - description (str): - Description of the version. - """ - - version: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListInfoTypesRequest(proto.Message): - r"""Request for the list of infoTypes. - - Attributes: - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - language_code (str): - BCP-47 language code for localized infoType - friendly names. If omitted, or if localized - strings are not available, en-US strings will be - returned. - filter (str): - filter to only return infoTypes supported by certain parts - of the API. Defaults to supported_by=INSPECT. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - language_code: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - location_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListInfoTypesResponse(proto.Message): - r"""Response to the ListInfoTypes request. 
- - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeDescription]): - Set of sensitive infoTypes. - """ - - info_types: MutableSequence['InfoTypeDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InfoTypeDescription', - ) - - -class RiskAnalysisJobConfig(proto.Message): - r"""Configuration for a risk analysis job. See - https://cloud.google.com/dlp/docs/concepts-risk-analysis to - learn more. - - Attributes: - privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - actions (MutableSequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. Are executed in the order provided. - """ - - privacy_metric: 'PrivacyMetric' = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - source_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - actions: MutableSequence['Action'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Action', - ) - - -class QuasiId(proto.Message): - r"""A column with a semantic tag attached. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. - info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. 
To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - - This field is a member of `oneof`_ ``tag``. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - - This field is a member of `oneof`_ ``tag``. - inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - - This field is a member of `oneof`_ ``tag``. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred: empty_pb2.Empty = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - -class StatisticalTable(proto.Message): - r"""An auxiliary table containing statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. - If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). 
Null values are assumed to be zero. - """ - - class QuasiIdentifierField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=2, - ) - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids: MutableSequence[QuasiIdentifierField] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=QuasiIdentifierField, - ) - relative_frequency: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - -class PrivacyMetric(proto.Message): - r"""Privacy metric to compute for reidentification risk analysis. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): - Numerical stats - - This field is a member of `oneof`_ ``type``. - categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig): - Categorical stats - - This field is a member of `oneof`_ ``type``. - k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig): - K-anonymity - - This field is a member of `oneof`_ ``type``. 
- l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig): - l-diversity - - This field is a member of `oneof`_ ``type``. - k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig): - k-map - - This field is a member of `oneof`_ ``type``. - delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig): - delta-presence - - This field is a member of `oneof`_ ``type``. - """ - - class NumericalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - min, max, and quantiles. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute numerical stats on. - Supported types are integer, float, date, - datetime, timestamp, time. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class CategoricalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - number of distinct values and value count distribution. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute categorical stats on. All - column types are supported except for arrays and - structs. However, it may be more informative to - use NumericalStats when the field type is - supported, depending on the data. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class KAnonymityConfig(proto.Message): - r"""k-anonymity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Set of fields to compute k-anonymity over. - When multiple fields are specified, they are - considered a single composite key. Structs and - repeated data types are not supported; however, - nested fields are supported so long as they are - not structs themselves or nested within a - repeated field. 
- entity_id (google.cloud.dlp_v2.types.EntityId): - Message indicating that multiple rows might be associated to - a single individual. If the same entity_id is associated to - multiple quasi-identifier tuples over distinct rows, we - consider the entire collection of tuples as the composite - quasi-identifier. This collection is a multiset: the order - in which the different tuples appear in the dataset is - ignored, but their frequency is taken into account. - - Important note: a maximum of 1000 rows can be associated to - a single entity ID. If more rows are associated with the - same entity ID, some might be ignored. - """ - - quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - entity_id: storage.EntityId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.EntityId, - ) - - class LDiversityConfig(proto.Message): - r"""l-diversity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Set of quasi-identifiers indicating how - equivalence classes are defined for the - l-diversity computation. When multiple fields - are specified, they are considered a single - composite key. - sensitive_attribute (google.cloud.dlp_v2.types.FieldId): - Sensitive field for computing the l-value. - """ - - quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - sensitive_attribute: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - class KMapEstimationConfig(proto.Message): - r"""Reidentifiability metric. This corresponds to a risk model - similar to what is called "journalist risk" in the literature, - except the attack dataset is statistically modeled instead of - being perfectly known. 
This can be done using publicly available - data (like the US Census), or using a custom statistical model - (indicated as one or several BigQuery tables), or by - extrapolating from the distribution of values in the input - dataset. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): - Required. Fields considered to be - quasi-identifiers. No two columns can have the - same tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. - auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): - Several auxiliary tables can be used in the analysis. Each - custom_tag used to tag a quasi-identifiers column must - appear in exactly one column of one auxiliary table. - """ - - class TaggedField(proto.Message): - r"""A column with a semantic tag attached. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. - info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - - This field is a member of `oneof`_ ``tag``. - custom_tag (str): - A column can be tagged with a custom tag. 
In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - - This field is a member of `oneof`_ ``tag``. - inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - - This field is a member of `oneof`_ ``tag``. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred: empty_pb2.Empty = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - class AuxiliaryTable(proto.Message): - r"""An auxiliary table contains statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. - If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). Null values are assumed to be zero. - """ - - class QuasiIdField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. 
- - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A auxiliary field. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=2, - ) - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField', - ) - relative_frequency: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.TaggedField'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.TaggedField', - ) - region_code: str = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable', - ) - - class DeltaPresenceEstimationConfig(proto.Message): - r"""δ-presence metric, used to estimate how likely it is for an - attacker to figure out that one given individual appears in a - de-identified dataset. Similarly to the k-map metric, we cannot - compute δ-presence exactly without knowing the attack dataset, - so we use a statistical model instead. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.QuasiId]): - Required. Fields considered to be - quasi-identifiers. No two fields can have the - same tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. 
- auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable]): - Several auxiliary tables can be used in the analysis. Each - custom_tag used to tag a quasi-identifiers field must appear - in exactly one field of one auxiliary table. - """ - - quasi_ids: MutableSequence['QuasiId'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='QuasiId', - ) - region_code: str = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables: MutableSequence['StatisticalTable'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StatisticalTable', - ) - - numerical_stats_config: NumericalStatsConfig = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=NumericalStatsConfig, - ) - categorical_stats_config: CategoricalStatsConfig = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=CategoricalStatsConfig, - ) - k_anonymity_config: KAnonymityConfig = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=KAnonymityConfig, - ) - l_diversity_config: LDiversityConfig = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=LDiversityConfig, - ) - k_map_estimation_config: KMapEstimationConfig = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=KMapEstimationConfig, - ) - delta_presence_estimation_config: DeltaPresenceEstimationConfig = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=DeltaPresenceEstimationConfig, - ) - - -class AnalyzeDataSourceRiskDetails(proto.Message): - r"""Result of a risk analysis operation request. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - requested_source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult): - Numerical stats result - - This field is a member of `oneof`_ ``result``. - categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult): - Categorical stats result - - This field is a member of `oneof`_ ``result``. - k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult): - K-anonymity result - - This field is a member of `oneof`_ ``result``. - l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult): - L-divesity result - - This field is a member of `oneof`_ ``result``. - k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult): - K-map result - - This field is a member of `oneof`_ ``result``. - delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): - Delta-presence result - - This field is a member of `oneof`_ ``result``. - requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions): - The configuration used for this job. - """ - - class NumericalStatsResult(proto.Message): - r"""Result of the numerical stats computation. - - Attributes: - min_value (google.cloud.dlp_v2.types.Value): - Minimum value appearing in the column. - max_value (google.cloud.dlp_v2.types.Value): - Maximum value appearing in the column. - quantile_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - List of 99 values that partition the set of - field values into 100 equal sized buckets. 
- """ - - min_value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_value: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - quantile_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Value', - ) - - class CategoricalStatsResult(proto.Message): - r"""Result of the categorical stats computation. - - Attributes: - value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): - Histogram of value frequencies in the column. - """ - - class CategoricalStatsHistogramBucket(proto.Message): - r"""Histogram of value frequencies in the column. - - Attributes: - value_frequency_lower_bound (int): - Lower bound on the value frequency of the - values in this bucket. - value_frequency_upper_bound (int): - Upper bound on the value frequency of the - values in this bucket. - bucket_size (int): - Total number of values in this bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): - Sample of value frequencies in this bucket. - The total number of values returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct values in this - bucket. 
- """ - - value_frequency_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - value_frequency_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket', - ) - - class KAnonymityResult(proto.Message): - r"""Result of the k-anonymity computation. - - Attributes: - equivalence_class_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): - Histogram of k-anonymity equivalence classes. - """ - - class KAnonymityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Set of values defining the equivalence class. - One value per quasi-identifier column in the - original KAnonymity metric message. The order is - always the same as the original request. - equivalence_class_size (int): - Size of the equivalence class, for example - number of rows with the above set of values. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size: int = proto.Field( - proto.INT64, - number=2, - ) - - class KAnonymityHistogramBucket(proto.Message): - r"""Histogram of k-anonymity equivalence classes. 
- - Attributes: - equivalence_class_size_lower_bound (int): - Lower bound on the size of the equivalence - classes in this bucket. - equivalence_class_size_upper_bound (int): - Upper bound on the size of the equivalence - classes in this bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. - """ - - equivalence_class_size_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - equivalence_class_size_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - equivalence_class_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', - ) - - class LDiversityResult(proto.Message): - r"""Result of the l-diversity computation. - - Attributes: - sensitive_value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): - Histogram of l-diversity equivalence class - sensitive value frequencies. - """ - - class LDiversityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value. 
- - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Quasi-identifier values defining the - k-anonymity equivalence class. The order is - always the same as the original request. - equivalence_class_size (int): - Size of the k-anonymity equivalence class. - num_distinct_sensitive_values (int): - Number of distinct sensitive values in this - equivalence class. - top_sensitive_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): - Estimated frequencies of top sensitive - values. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size: int = proto.Field( - proto.INT64, - number=2, - ) - num_distinct_sensitive_values: int = proto.Field( - proto.INT64, - number=3, - ) - top_sensitive_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - - class LDiversityHistogramBucket(proto.Message): - r"""Histogram of l-diversity equivalence class sensitive value - frequencies. - - Attributes: - sensitive_value_frequency_lower_bound (int): - Lower bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - sensitive_value_frequency_upper_bound (int): - Upper bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. 
- """ - - sensitive_value_frequency_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - sensitive_value_frequency_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - sensitive_value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', - ) - - class KMapEstimationResult(proto.Message): - r"""Result of the reidentifiability analysis. Note that these - results are an estimation, not exact values. - - Attributes: - k_map_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): - The intervals [min_anonymity, max_anonymity] do not overlap. - If a value doesn't correspond to any such interval, the - associated frequency is zero. For example, the following - records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} - {min_anonymity: 2, max_anonymity: 3, frequency: 42} - {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean - that there are no record with an estimated anonymity of 4, - 5, or larger than 10. - """ - - class KMapEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. - estimated_anonymity (int): - The estimated anonymity for these - quasi-identifier values. 
- """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_anonymity: int = proto.Field( - proto.INT64, - number=2, - ) - - class KMapEstimationHistogramBucket(proto.Message): - r"""A KMapEstimationHistogramBucket message with the following values: - min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are - 42 records whose quasi-identifier values correspond to 3, 4 or 5 - people in the overlying population. An important particular case is - when min_anonymity = max_anonymity = 1: the frequency field then - corresponds to the number of uniquely identifiable records. - - Attributes: - min_anonymity (int): - Always positive. - max_anonymity (int): - Always greater than or equal to min_anonymity. - bucket_size (int): - Number of records within these anonymity - bounds. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_anonymity: int = proto.Field( - proto.INT64, - number=1, - ) - max_anonymity: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=5, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=7, - ) - - k_map_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket', - ) - - class DeltaPresenceEstimationResult(proto.Message): - r"""Result of the δ-presence computation. Note that these results - are an estimation, not exact values. - - Attributes: - delta_presence_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): - The intervals [min_probability, max_probability) do not - overlap. If a value doesn't correspond to any such interval, - the associated frequency is zero. For example, the following - records: {min_probability: 0, max_probability: 0.1, - frequency: 17} {min_probability: 0.2, max_probability: 0.3, - frequency: 42} {min_probability: 0.3, max_probability: 0.4, - frequency: 99} mean that there are no record with an - estimated probability in [0.1, 0.2) nor larger or equal to - 0.4. - """ - - class DeltaPresenceEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. 
- estimated_probability (float): - The estimated probability that a given individual sharing - these quasi-identifier values is in the dataset. This value, - typically called δ, is the ratio between the number of - records in the dataset with these quasi-identifier values, - and the total number of individuals (inside *and* outside - the dataset) with these quasi-identifier values. For - example, if there are 15 individuals in the dataset who - share the same quasi-identifier values, and an estimated 100 - people in the entire population with these values, then δ is - 0.15. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_probability: float = proto.Field( - proto.DOUBLE, - number=2, - ) - - class DeltaPresenceEstimationHistogramBucket(proto.Message): - r"""A DeltaPresenceEstimationHistogramBucket message with the following - values: min_probability: 0.1 max_probability: 0.2 frequency: 42 - means that there are 42 records for which δ is in [0.1, 0.2). An - important particular case is when min_probability = max_probability - = 1: then, every individual who shares this quasi-identifier - combination is in the dataset. - - Attributes: - min_probability (float): - Between 0 and 1. - max_probability (float): - Always greater than or equal to min_probability. - bucket_size (int): - Number of records within these probability - bounds. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_probability: float = proto.Field( - proto.DOUBLE, - number=1, - ) - max_probability: float = proto.Field( - proto.DOUBLE, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=5, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=7, - ) - - delta_presence_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', - ) - - class RequestedRiskAnalysisOptions(proto.Message): - r"""Risk analysis options. - - Attributes: - job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - The job config for the risk job. 
- """ - - job_config: 'RiskAnalysisJobConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='RiskAnalysisJobConfig', - ) - - requested_privacy_metric: 'PrivacyMetric' = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - requested_source_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - numerical_stats_result: NumericalStatsResult = proto.Field( - proto.MESSAGE, - number=3, - oneof='result', - message=NumericalStatsResult, - ) - categorical_stats_result: CategoricalStatsResult = proto.Field( - proto.MESSAGE, - number=4, - oneof='result', - message=CategoricalStatsResult, - ) - k_anonymity_result: KAnonymityResult = proto.Field( - proto.MESSAGE, - number=5, - oneof='result', - message=KAnonymityResult, - ) - l_diversity_result: LDiversityResult = proto.Field( - proto.MESSAGE, - number=6, - oneof='result', - message=LDiversityResult, - ) - k_map_estimation_result: KMapEstimationResult = proto.Field( - proto.MESSAGE, - number=7, - oneof='result', - message=KMapEstimationResult, - ) - delta_presence_estimation_result: DeltaPresenceEstimationResult = proto.Field( - proto.MESSAGE, - number=9, - oneof='result', - message=DeltaPresenceEstimationResult, - ) - requested_options: RequestedRiskAnalysisOptions = proto.Field( - proto.MESSAGE, - number=10, - message=RequestedRiskAnalysisOptions, - ) - - -class ValueFrequency(proto.Message): - r"""A value of a field, including its frequency. - - Attributes: - value (google.cloud.dlp_v2.types.Value): - A value contained in the field in question. - count (int): - How many times the value is contained in the - field. - """ - - value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - -class Value(proto.Message): - r"""Set of primitive values supported by the system. 
Note that for the - purposes of inspection or transformation, the number of bytes - considered to comprise a 'Value' is based on its representation as a - UTF-8 encoded string. For example, if 'integer_value' is set to - 123456789, the number of bytes would be counted as 9, even though an - int64 only holds up to 8 bytes of data. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - integer_value (int): - integer - - This field is a member of `oneof`_ ``type``. - float_value (float): - float - - This field is a member of `oneof`_ ``type``. - string_value (str): - string - - This field is a member of `oneof`_ ``type``. - boolean_value (bool): - boolean - - This field is a member of `oneof`_ ``type``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - timestamp - - This field is a member of `oneof`_ ``type``. - time_value (google.type.timeofday_pb2.TimeOfDay): - time of day - - This field is a member of `oneof`_ ``type``. - date_value (google.type.date_pb2.Date): - date - - This field is a member of `oneof`_ ``type``. - day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): - day of week - - This field is a member of `oneof`_ ``type``. 
- """ - - integer_value: int = proto.Field( - proto.INT64, - number=1, - oneof='type', - ) - float_value: float = proto.Field( - proto.DOUBLE, - number=2, - oneof='type', - ) - string_value: str = proto.Field( - proto.STRING, - number=3, - oneof='type', - ) - boolean_value: bool = proto.Field( - proto.BOOL, - number=4, - oneof='type', - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=timestamp_pb2.Timestamp, - ) - time_value: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=timeofday_pb2.TimeOfDay, - ) - date_value: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=7, - oneof='type', - message=date_pb2.Date, - ) - day_of_week_value: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=8, - oneof='type', - enum=dayofweek_pb2.DayOfWeek, - ) - - -class QuoteInfo(proto.Message): - r"""Message for infoType-dependent details parsed from quote. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - date_time (google.cloud.dlp_v2.types.DateTime): - The date time indicated by the quote. - - This field is a member of `oneof`_ ``parsed_quote``. - """ - - date_time: 'DateTime' = proto.Field( - proto.MESSAGE, - number=2, - oneof='parsed_quote', - message='DateTime', - ) - - -class DateTime(proto.Message): - r"""Message for a date time object. - e.g. 2018-01-01, 5th August. - - Attributes: - date (google.type.date_pb2.Date): - One or more of the following must be set. - Must be a valid date or time value. - day_of_week (google.type.dayofweek_pb2.DayOfWeek): - Day of week - time (google.type.timeofday_pb2.TimeOfDay): - Time of day - time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): - Time zone - """ - - class TimeZone(proto.Message): - r"""Time zone of the date time object. - - Attributes: - offset_minutes (int): - Set only if the offset can be determined. 
- Positive for time ahead of UTC. E.g. For - "UTC-9", this value is -540. - """ - - offset_minutes: int = proto.Field( - proto.INT32, - number=1, - ) - - date: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=1, - message=date_pb2.Date, - ) - day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=2, - enum=dayofweek_pb2.DayOfWeek, - ) - time: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=3, - message=timeofday_pb2.TimeOfDay, - ) - time_zone: TimeZone = proto.Field( - proto.MESSAGE, - number=4, - message=TimeZone, - ) - - -class DeidentifyConfig(proto.Message): - r"""The configuration that controls how the data will change. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the dataset as free-form text and apply - the same free text transformation everywhere. - - This field is a member of `oneof`_ ``transformation``. - record_transformations (google.cloud.dlp_v2.types.RecordTransformations): - Treat the dataset as structured. - Transformations can be applied to specific - locations within structured datasets, such as - transforming a column within a table. - - This field is a member of `oneof`_ ``transformation``. - image_transformations (google.cloud.dlp_v2.types.ImageTransformations): - Treat the dataset as an image and redact. - - This field is a member of `oneof`_ ``transformation``. - transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): - Mode for handling transformation errors. If left - unspecified, the default mode is - ``TransformationErrorHandling.ThrowError``. 
- """ - - info_type_transformations: 'InfoTypeTransformations' = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='InfoTypeTransformations', - ) - record_transformations: 'RecordTransformations' = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RecordTransformations', - ) - image_transformations: 'ImageTransformations' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='ImageTransformations', - ) - transformation_error_handling: 'TransformationErrorHandling' = proto.Field( - proto.MESSAGE, - number=3, - message='TransformationErrorHandling', - ) - - -class ImageTransformations(proto.Message): - r"""A type of transformation that is applied over images. - - Attributes: - transforms (MutableSequence[google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation]): - - """ - - class ImageTransformation(proto.Message): - r"""Configuration for determining how redaction of images should - occur. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - selected_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.SelectedInfoTypes): - Apply transformation to the selected info_types. - - This field is a member of `oneof`_ ``target``. - all_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllInfoTypes): - Apply transformation to all findings not specified in other - ImageTransformation's selected_info_types. Only one instance - is allowed within the ImageTransformations message. - - This field is a member of `oneof`_ ``target``. 
- all_text (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllText): - Apply transformation to all text that doesn't - match an infoType. Only one instance is allowed - within the ImageTransformations message. - - This field is a member of `oneof`_ ``target``. - redaction_color (google.cloud.dlp_v2.types.Color): - The color to use when redacting content from - an image. If not specified, the default is - black. - """ - - class SelectedInfoTypes(proto.Message): - r"""Apply transformation to the selected info_types. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - Required. InfoTypes to apply the - transformation to. Required. Provided InfoType - must be unique within the ImageTransformations - message. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=storage.InfoType, - ) - - class AllInfoTypes(proto.Message): - r"""Apply transformation to all findings. - """ - - class AllText(proto.Message): - r"""Apply to all text. 
- """ - - selected_info_types: 'ImageTransformations.ImageTransformation.SelectedInfoTypes' = proto.Field( - proto.MESSAGE, - number=4, - oneof='target', - message='ImageTransformations.ImageTransformation.SelectedInfoTypes', - ) - all_info_types: 'ImageTransformations.ImageTransformation.AllInfoTypes' = proto.Field( - proto.MESSAGE, - number=5, - oneof='target', - message='ImageTransformations.ImageTransformation.AllInfoTypes', - ) - all_text: 'ImageTransformations.ImageTransformation.AllText' = proto.Field( - proto.MESSAGE, - number=6, - oneof='target', - message='ImageTransformations.ImageTransformation.AllText', - ) - redaction_color: 'Color' = proto.Field( - proto.MESSAGE, - number=3, - message='Color', - ) - - transforms: MutableSequence[ImageTransformation] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=ImageTransformation, - ) - - -class TransformationErrorHandling(proto.Message): - r"""How to handle transformation errors during de-identification. A - transformation error occurs when the requested transformation is - incompatible with the data. For example, trying to de-identify an IP - address using a ``DateShift`` transformation would result in a - transformation error, since date info cannot be extracted from an IP - address. Information about any incompatible transformations, and how - they were handled, is returned in the response as part of the - ``TransformationOverviews``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): - Throw an error - - This field is a member of `oneof`_ ``mode``. 
- leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed): - Ignore errors - - This field is a member of `oneof`_ ``mode``. - """ - - class ThrowError(proto.Message): - r"""Throw an error and fail the request when a transformation - error occurs. - - """ - - class LeaveUntransformed(proto.Message): - r"""Skips the data without modifying it if the requested transformation - would cause an error. For example, if a ``DateShift`` transformation - were applied an an IP address, this mode would leave the IP address - unchanged in the response. - - """ - - throw_error: ThrowError = proto.Field( - proto.MESSAGE, - number=1, - oneof='mode', - message=ThrowError, - ) - leave_untransformed: LeaveUntransformed = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=LeaveUntransformed, - ) - - -class PrimitiveTransformation(proto.Message): - r"""A rule for transforming a value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig): - Replace with a specified value. - - This field is a member of `oneof`_ ``transformation``. - redact_config (google.cloud.dlp_v2.types.RedactConfig): - Redact - - This field is a member of `oneof`_ ``transformation``. - character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig): - Mask - - This field is a member of `oneof`_ ``transformation``. - crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig): - Ffx-Fpe - - This field is a member of `oneof`_ ``transformation``. 
- fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): - Fixed size bucketing - - This field is a member of `oneof`_ ``transformation``. - bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): - Bucketing - - This field is a member of `oneof`_ ``transformation``. - replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): - Replace with infotype - - This field is a member of `oneof`_ ``transformation``. - time_part_config (google.cloud.dlp_v2.types.TimePartConfig): - Time extraction - - This field is a member of `oneof`_ ``transformation``. - crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): - Crypto - - This field is a member of `oneof`_ ``transformation``. - date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): - Date Shift - - This field is a member of `oneof`_ ``transformation``. - crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): - Deterministic Crypto - - This field is a member of `oneof`_ ``transformation``. - replace_dictionary_config (google.cloud.dlp_v2.types.ReplaceDictionaryConfig): - Replace with a value randomly drawn (with - replacement) from a dictionary. - - This field is a member of `oneof`_ ``transformation``. 
- """ - - replace_config: 'ReplaceValueConfig' = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='ReplaceValueConfig', - ) - redact_config: 'RedactConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RedactConfig', - ) - character_mask_config: 'CharacterMaskConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='transformation', - message='CharacterMaskConfig', - ) - crypto_replace_ffx_fpe_config: 'CryptoReplaceFfxFpeConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='CryptoReplaceFfxFpeConfig', - ) - fixed_size_bucketing_config: 'FixedSizeBucketingConfig' = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='FixedSizeBucketingConfig', - ) - bucketing_config: 'BucketingConfig' = proto.Field( - proto.MESSAGE, - number=6, - oneof='transformation', - message='BucketingConfig', - ) - replace_with_info_type_config: 'ReplaceWithInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=7, - oneof='transformation', - message='ReplaceWithInfoTypeConfig', - ) - time_part_config: 'TimePartConfig' = proto.Field( - proto.MESSAGE, - number=8, - oneof='transformation', - message='TimePartConfig', - ) - crypto_hash_config: 'CryptoHashConfig' = proto.Field( - proto.MESSAGE, - number=9, - oneof='transformation', - message='CryptoHashConfig', - ) - date_shift_config: 'DateShiftConfig' = proto.Field( - proto.MESSAGE, - number=11, - oneof='transformation', - message='DateShiftConfig', - ) - crypto_deterministic_config: 'CryptoDeterministicConfig' = proto.Field( - proto.MESSAGE, - number=12, - oneof='transformation', - message='CryptoDeterministicConfig', - ) - replace_dictionary_config: 'ReplaceDictionaryConfig' = proto.Field( - proto.MESSAGE, - number=13, - oneof='transformation', - message='ReplaceDictionaryConfig', - ) - - -class TimePartConfig(proto.Message): - r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or - preserve a 
portion of the value. - - Attributes: - part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): - The part of the time to keep. - """ - class TimePart(proto.Enum): - r"""Components that make up time. - - Values: - TIME_PART_UNSPECIFIED (0): - Unused - YEAR (1): - [0-9999] - MONTH (2): - [1-12] - DAY_OF_MONTH (3): - [1-31] - DAY_OF_WEEK (4): - [1-7] - WEEK_OF_YEAR (5): - [1-53] - HOUR_OF_DAY (6): - [0-23] - """ - TIME_PART_UNSPECIFIED = 0 - YEAR = 1 - MONTH = 2 - DAY_OF_MONTH = 3 - DAY_OF_WEEK = 4 - WEEK_OF_YEAR = 5 - HOUR_OF_DAY = 6 - - part_to_extract: TimePart = proto.Field( - proto.ENUM, - number=1, - enum=TimePart, - ) - - -class CryptoHashConfig(proto.Message): - r"""Pseudonymization method that generates surrogates via - cryptographic hashing. Uses SHA-256. - The key size must be either 32 or 64 bytes. - Outputs a base64 encoded representation of the hashed output - (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). - Currently, only string and integer values can be hashed. See - https://cloud.google.com/dlp/docs/pseudonymization to learn - more. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the hash function. - """ - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - - -class CryptoDeterministicConfig(proto.Message): - r"""Pseudonymization method that generates deterministic - encryption for the given input. Outputs a base64 encoded - representation of the encrypted output. Uses AES-SIV based on - the RFC https://tools.ietf.org/html/rfc5297. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the encryption function. For - deterministic encryption using AES-SIV, the - provided key is internally expanded to 64 bytes - prior to use. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom info type to annotate the surrogate with. 
This - annotation will be applied to the surrogate by prefixing it - with the name of the custom info type followed by the number - of characters comprising the surrogate. The following scheme - defines the format: {info type name}({surrogate character - count}):{surrogate} - - For example, if the name of custom info type is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom info type 'Surrogate'. This - facilitates reversal of the surrogate when it occurs in free - text. - - Note: For record transformations where the entire cell in a - table is being transformed, surrogates are not mandatory. - Surrogates are used to denote the location of the token and - are necessary for re-identification in free form text. - - In order for inspection to work properly, the name of this - info type must not occur naturally anywhere in your data; - otherwise, inspection may either - - - reverse a surrogate that does not correspond to an actual - identifier - - be unable to parse the surrogate and result in an error - - Therefore, choose your custom info type name carefully after - considering what your data looks like. One way to select a - name that has a high chance of yielding reliable detection - is to include one or more unicode characters that are highly - improbable to exist in your data. For example, assuming your - data is entered from a regular ASCII keyboard, the symbol - with the hex code point 29DD might be used like so: - ⧝MY_TOKEN_TYPE. - context (google.cloud.dlp_v2.types.FieldId): - A context may be used for higher security and maintaining - referential integrity such that the same identifier in two - different contexts will be given a distinct surrogate. The - context is appended to plaintext value being encrypted. On - decryption the provided context is validated against the - value used during encryption. 
If a context was provided - during encryption, same context must be provided during - decryption as well. - - If the context is not set, plaintext would be used as is for - encryption. If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - plaintext would be used as is for encryption. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - unstructured ``ContentItem``\ s. - """ - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - surrogate_info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - - -class ReplaceValueConfig(proto.Message): - r"""Replace each input value with a given ``Value``. - - Attributes: - new_value (google.cloud.dlp_v2.types.Value): - Value to replace it with. - """ - - new_value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - - -class ReplaceDictionaryConfig(proto.Message): - r"""Replace each input value with a value randomly selected from - the dictionary. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): - A list of words to select from for random replacement. The - `limits `__ page - contains details about the size limits of dictionaries. - - This field is a member of `oneof`_ ``type``. - """ - - word_list: storage.CustomInfoType.Dictionary.WordList = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.Dictionary.WordList, - ) - - -class ReplaceWithInfoTypeConfig(proto.Message): - r"""Replace each matching finding with the name of the info_type. 
- """ - - -class RedactConfig(proto.Message): - r"""Redact a given value. For example, if used with an - ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My - phone number is 206-555-0123', the output would be 'My phone number - is '. - - """ - - -class CharsToIgnore(proto.Message): - r"""Characters to skip when doing deidentification of a value. - These will be left alone and skipped. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - characters_to_skip (str): - Characters to not transform when masking. - - This field is a member of `oneof`_ ``characters``. - common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): - Common characters to not transform when - masking. Useful to avoid removing punctuation. - - This field is a member of `oneof`_ ``characters``. - """ - class CommonCharsToIgnore(proto.Enum): - r"""Convenience enum for indicating common characters to not - transform. - - Values: - COMMON_CHARS_TO_IGNORE_UNSPECIFIED (0): - Unused. 
- NUMERIC (1): - 0-9 - ALPHA_UPPER_CASE (2): - A-Z - ALPHA_LOWER_CASE (3): - a-z - PUNCTUATION (4): - US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ - WHITESPACE (5): - Whitespace character, one of [ \\t\n\x0B\f\r] - """ - COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 - NUMERIC = 1 - ALPHA_UPPER_CASE = 2 - ALPHA_LOWER_CASE = 3 - PUNCTUATION = 4 - WHITESPACE = 5 - - characters_to_skip: str = proto.Field( - proto.STRING, - number=1, - oneof='characters', - ) - common_characters_to_ignore: CommonCharsToIgnore = proto.Field( - proto.ENUM, - number=2, - oneof='characters', - enum=CommonCharsToIgnore, - ) - - -class CharacterMaskConfig(proto.Message): - r"""Partially mask a string by replacing a given number of characters - with a fixed character. Masking can start from the beginning or end - of the string. This can be used on data of any type (numbers, longs, - and so on) and when de-identifying structured data we'll attempt to - preserve the original data's type. (This allows you to take a long - like 123 and modify it to a string like \**3. - - Attributes: - masking_character (str): - Character to use to mask the sensitive values—for example, - ``*`` for an alphabetic string such as a name, or ``0`` for - a numeric string such as ZIP code or credit card number. - This string must have a length of 1. If not supplied, this - value defaults to ``*`` for strings, and ``0`` for digits. - number_to_mask (int): - Number of characters to mask. If not set, all matching chars - will be masked. Skipped characters do not count towards this - tally. - - If ``number_to_mask`` is negative, this denotes inverse - masking. Cloud DLP masks all but a number of characters. For - example, suppose you have the following values: - - - ``masking_character`` is ``*`` - - ``number_to_mask`` is ``-4`` - - ``reverse_order`` is ``false`` - - ``CharsToIgnore`` includes ``-`` - - Input string is ``1234-5678-9012-3456`` - - The resulting de-identified string is - ``****-****-****-3456``. 
Cloud DLP masks all but the last - four characters. If ``reverse_order`` is ``true``, all but - the first four characters are masked as - ``1234-****-****-****``. - reverse_order (bool): - Mask characters in reverse order. For example, if - ``masking_character`` is ``0``, ``number_to_mask`` is - ``14``, and ``reverse_order`` is ``false``, then the input - string ``1234-5678-9012-3456`` is masked as - ``00000000000000-3456``. If ``masking_character`` is ``*``, - ``number_to_mask`` is ``3``, and ``reverse_order`` is - ``true``, then the string ``12345`` is masked as ``12***``. - characters_to_ignore (MutableSequence[google.cloud.dlp_v2.types.CharsToIgnore]): - When masking a string, items in this list will be skipped - when replacing characters. For example, if the input string - is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` - and mask 5 characters with ``*``, Cloud DLP returns - ``***-**5-5555``. - """ - - masking_character: str = proto.Field( - proto.STRING, - number=1, - ) - number_to_mask: int = proto.Field( - proto.INT32, - number=2, - ) - reverse_order: bool = proto.Field( - proto.BOOL, - number=3, - ) - characters_to_ignore: MutableSequence['CharsToIgnore'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='CharsToIgnore', - ) - - -class FixedSizeBucketingConfig(proto.Message): - r"""Buckets values based on fixed size ranges. The Bucketing - transformation can provide all of this functionality, but requires - more configuration. This message is provided as a convenience to the - user for simple bucketing strategies. - - The transformed value will be a hyphenated string of - {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and - upper_bound = 20, all values that are within this bucket will be - replaced with "10-20". - - This can be used on data of type: double, long. 
- - If the bound Value type differs from the type of data being - transformed, we will first attempt converting the type of the data - to be transformed to match the type of the bound before comparing. - - See https://cloud.google.com/dlp/docs/concepts-bucketing to learn - more. - - Attributes: - lower_bound (google.cloud.dlp_v2.types.Value): - Required. Lower bound value of buckets. All values less than - ``lower_bound`` are grouped together into a single bucket; - for example if ``lower_bound`` = 10, then all values less - than 10 are replaced with the value "-10". - upper_bound (google.cloud.dlp_v2.types.Value): - Required. Upper bound value of buckets. All values greater - than upper_bound are grouped together into a single bucket; - for example if ``upper_bound`` = 89, then all values greater - than 89 are replaced with the value "89+". - bucket_size (float): - Required. Size of each bucket (except for minimum and - maximum buckets). So if ``lower_bound`` = 10, - ``upper_bound`` = 89, and ``bucket_size`` = 10, then the - following buckets would be used: -10, 10-20, 20-30, 30-40, - 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 - decimals works. - """ - - lower_bound: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - upper_bound: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - bucket_size: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - -class BucketingConfig(proto.Message): - r"""Generalization function that buckets values based on ranges. The - ranges and replacement values are dynamically provided by the user - for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> - HIGH This can be used on data of type: number, long, string, - timestamp. If the bound ``Value`` type differs from the type of data - being transformed, we will first attempt converting the type of the - data to be transformed to match the type of the bound before - comparing. 
See https://cloud.google.com/dlp/docs/concepts-bucketing - to learn more. - - Attributes: - buckets (MutableSequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): - Set of buckets. Ranges must be - non-overlapping. - """ - - class Bucket(proto.Message): - r"""Bucket is represented as a range, along with replacement - values. - - Attributes: - min_ (google.cloud.dlp_v2.types.Value): - Lower bound of the range, inclusive. Type - should be the same as max if used. - max_ (google.cloud.dlp_v2.types.Value): - Upper bound of the range, exclusive; type - must match min. - replacement_value (google.cloud.dlp_v2.types.Value): - Required. Replacement value for this bucket. - """ - - min_: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - replacement_value: 'Value' = proto.Field( - proto.MESSAGE, - number=3, - message='Value', - ) - - buckets: MutableSequence[Bucket] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Bucket, - ) - - -class CryptoReplaceFfxFpeConfig(proto.Message): - r"""Replaces an identifier with a surrogate using Format Preserving - Encryption (FPE) with the FFX mode of operation; however when used - in the ``ReidentifyContent`` API method, it serves the opposite - function by reversing the surrogate back into the original - identifier. The identifier must be encoded as ASCII. For a given - crypto key and context, the same identifier will be replaced with - the same surrogate. Identifiers must be at least two characters - long. In the case that the identifier is the empty string, it will - be skipped. See https://cloud.google.com/dlp/docs/pseudonymization - to learn more. - - Note: We recommend using CryptoDeterministicConfig for all use cases - which do not require preserving the input alphabet space and size, - plus warrant referential integrity. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Required. The key used by the encryption - algorithm. - context (google.cloud.dlp_v2.types.FieldId): - The 'tweak', a context may be used for higher security since - the same identifier in two different contexts won't be given - the same surrogate. If the context is not set, a default - tweak will be used. - - If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - a default tweak will be used. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - unstructured ``ContentItem``\ s. Currently, the referenced - field may be of value type integer or string. - - The tweak is constructed as a sequence of bytes in big - endian byte order such that: - - - a 64 bit integer is encoded followed by a single byte of - value 1 - - a string is encoded in UTF-8 format followed by a single - byte of value 2 - common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): - Common alphabets. - - This field is a member of `oneof`_ ``alphabet``. - custom_alphabet (str): - This is supported by mapping these to the alphanumeric - characters that the FFX mode natively supports. This happens - before/after encryption/decryption. Each character listed - must appear only once. Number of characters must be in the - range [2, 95]. This must be encoded as ASCII. The order of - characters does not matter. 
The full list of allowed - characters is: - 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz - ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ - - This field is a member of `oneof`_ ``alphabet``. - radix (int): - The native way to select the alphabet. Must be in the range - [2, 95]. - - This field is a member of `oneof`_ ``alphabet``. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom infoType to annotate the surrogate with. This - annotation will be applied to the surrogate by prefixing it - with the name of the custom infoType followed by the number - of characters comprising the surrogate. The following scheme - defines the format: - info_type_name(surrogate_character_count):surrogate - - For example, if the name of custom infoType is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom infoType - ```SurrogateType`` `__. - This facilitates reversal of the surrogate when it occurs in - free text. - - In order for inspection to work properly, the name of this - infoType must not occur naturally anywhere in your data; - otherwise, inspection may find a surrogate that does not - correspond to an actual identifier. Therefore, choose your - custom infoType name carefully after considering what your - data looks like. One way to select a name that has a high - chance of yielding reliable detection is to include one or - more unicode characters that are highly improbable to exist - in your data. For example, assuming your data is entered - from a regular ASCII keyboard, the symbol with the hex code - point 29DD might be used like so: ⧝MY_TOKEN_TYPE - """ - class FfxCommonNativeAlphabet(proto.Enum): - r"""These are commonly used subsets of the alphabet that the FFX - mode natively supports. In the algorithm, the alphabet is - selected using the "radix". 
Therefore each corresponds to a - particular radix. - - Values: - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (0): - Unused. - NUMERIC (1): - ``[0-9]`` (radix of 10) - HEXADECIMAL (2): - ``[0-9A-F]`` (radix of 16) - UPPER_CASE_ALPHA_NUMERIC (3): - ``[0-9A-Z]`` (radix of 36) - ALPHA_NUMERIC (4): - ``[0-9A-Za-z]`` (radix of 62) - """ - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 - NUMERIC = 1 - HEXADECIMAL = 2 - UPPER_CASE_ALPHA_NUMERIC = 3 - ALPHA_NUMERIC = 4 - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - common_alphabet: FfxCommonNativeAlphabet = proto.Field( - proto.ENUM, - number=4, - oneof='alphabet', - enum=FfxCommonNativeAlphabet, - ) - custom_alphabet: str = proto.Field( - proto.STRING, - number=5, - oneof='alphabet', - ) - radix: int = proto.Field( - proto.INT32, - number=6, - oneof='alphabet', - ) - surrogate_info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=8, - message=storage.InfoType, - ) - - -class CryptoKey(proto.Message): - r"""This is a data encryption key (DEK) (as opposed to - a key encryption key (KEK) stored by Cloud Key Management - Service (Cloud KMS). - When using Cloud KMS to wrap or unwrap a DEK, be sure to set an - appropriate IAM policy on the KEK to ensure an attacker cannot - unwrap the DEK. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transient (google.cloud.dlp_v2.types.TransientCryptoKey): - Transient crypto key - - This field is a member of `oneof`_ ``source``. 
- unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): - Unwrapped crypto key - - This field is a member of `oneof`_ ``source``. - kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): - Key wrapped using Cloud KMS - - This field is a member of `oneof`_ ``source``. - """ - - transient: 'TransientCryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='TransientCryptoKey', - ) - unwrapped: 'UnwrappedCryptoKey' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='UnwrappedCryptoKey', - ) - kms_wrapped: 'KmsWrappedCryptoKey' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='KmsWrappedCryptoKey', - ) - - -class TransientCryptoKey(proto.Message): - r"""Use this to have a random data crypto key generated. - It will be discarded after the request finishes. - - Attributes: - name (str): - Required. Name of the key. This is an arbitrary string used - to differentiate different keys. A unique key is generated - per name: two separate ``TransientCryptoKey`` protos share - the same generated key if their names are the same. When the - data crypto key is generated, this name is not used in any - way (repeating the api call will result in a different key - being generated). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UnwrappedCryptoKey(proto.Message): - r"""Using raw keys is prone to security risks due to accidentally - leaking the key. Choose another type of key if possible. - - Attributes: - key (bytes): - Required. A 128/192/256 bit key. - """ - - key: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class KmsWrappedCryptoKey(proto.Message): - r"""Include to use an existing data crypto key wrapped by KMS. The - wrapped key must be a 128-, 192-, or 256-bit key. 
Authorization - requires the following IAM permissions when sending a request to - perform a crypto transformation using a KMS-wrapped crypto key: - dlp.kms.encrypt - - For more information, see [Creating a wrapped key] - (https://cloud.google.com/dlp/docs/create-wrapped-key). - - Note: When you use Cloud KMS for cryptographic operations, `charges - apply `__. - - Attributes: - wrapped_key (bytes): - Required. The wrapped data crypto key. - crypto_key_name (str): - Required. The resource name of the KMS - CryptoKey to use for unwrapping. - """ - - wrapped_key: bytes = proto.Field( - proto.BYTES, - number=1, - ) - crypto_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DateShiftConfig(proto.Message): - r"""Shifts dates by random number of days, with option to be - consistent for the same context. See - https://cloud.google.com/dlp/docs/concepts-date-shifting to - learn more. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - upper_bound_days (int): - Required. Range of shift in days. Actual - shift will be selected at random within this - range (inclusive ends). Negative means shift to - earlier in time. Must not be more than 365250 - days (1000 years) each direction. - For example, 3 means shift date to at most 3 - days into the future. - lower_bound_days (int): - Required. For example, -5 means shift date to - at most 5 days back in the past. - context (google.cloud.dlp_v2.types.FieldId): - Points to the field that contains the - context, for example, an entity id. If set, must - also set cryptoKey. If set, shift will be - consistent for the given context. - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Causes the shift to be computed based on this key and the - context. This results in the same shift for the same context - and crypto_key. If set, must also set context. Can only be - applied to table items. - - This field is a member of `oneof`_ ``method``. 
- """ - - upper_bound_days: int = proto.Field( - proto.INT32, - number=1, - ) - lower_bound_days: int = proto.Field( - proto.INT32, - number=2, - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=4, - oneof='method', - message='CryptoKey', - ) - - -class InfoTypeTransformations(proto.Message): - r"""A type of transformation that will scan unstructured text and apply - various ``PrimitiveTransformation``\ s to each finding, where the - transformation is applied to only values that were identified as a - specific info_type. - - Attributes: - transformations (MutableSequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): - Required. Transformation for each infoType. - Cannot specify more than one for a given - infoType. - """ - - class InfoTypeTransformation(proto.Message): - r"""A transformation to apply to text that is identified as a specific - info_type. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - InfoTypes to apply the transformation to. An empty list will - cause this transformation to apply to all findings that - correspond to infoTypes that were requested in - ``InspectConfig``. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Required. Primitive transformation to apply - to the infoType. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - primitive_transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=2, - message='PrimitiveTransformation', - ) - - transformations: MutableSequence[InfoTypeTransformation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=InfoTypeTransformation, - ) - - -class FieldTransformation(proto.Message): - r"""The transformation to apply to the field. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Required. Input field(s) to apply the transformation to. - When you have columns that reference their position within a - list, omit the index from the FieldId. FieldId name matching - ignores the index. For example, instead of - "contact.nums[0].type", use "contact.nums.type". - condition (google.cloud.dlp_v2.types.RecordCondition): - Only apply the transformation if the condition evaluates to - true for the given ``RecordCondition``. The conditions are - allowed to reference fields that are not used in the actual - transformation. - - Example Use Cases: - - - Apply a different bucket transformation to an age column - if the zip code column for the same record is within a - specific range. - - Redact a field if the date of birth field is greater than - 85. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Apply the transformation to the entire field. - - This field is a member of `oneof`_ ``transformation``. - info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the contents of the field as free text, and - selectively transform content that matches an ``InfoType``. - - This field is a member of `oneof`_ ``transformation``. 
- """ - - fields: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - condition: 'RecordCondition' = proto.Field( - proto.MESSAGE, - number=3, - message='RecordCondition', - ) - primitive_transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='PrimitiveTransformation', - ) - info_type_transformations: 'InfoTypeTransformations' = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='InfoTypeTransformations', - ) - - -class RecordTransformations(proto.Message): - r"""A type of transformation that is applied over structured data - such as a table. - - Attributes: - field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): - Transform the record by applying various - field transformations. - record_suppressions (MutableSequence[google.cloud.dlp_v2.types.RecordSuppression]): - Configuration defining which records get - suppressed entirely. Records that match any - suppression rule are omitted from the output. - """ - - field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldTransformation', - ) - record_suppressions: MutableSequence['RecordSuppression'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='RecordSuppression', - ) - - -class RecordSuppression(proto.Message): - r"""Configuration to suppress records whose suppression - conditions evaluate to true. - - Attributes: - condition (google.cloud.dlp_v2.types.RecordCondition): - A condition that when it evaluates to true - will result in the record being evaluated to be - suppressed from the transformed content. 
- """ - - condition: 'RecordCondition' = proto.Field( - proto.MESSAGE, - number=1, - message='RecordCondition', - ) - - -class RecordCondition(proto.Message): - r"""A condition for determining whether a transformation should - be applied to a field. - - Attributes: - expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): - An expression. - """ - - class Condition(proto.Message): - r"""The field type of ``value`` and ``field`` do not need to match to be - considered equal, but not all comparisons are possible. EQUAL_TO and - NOT_EQUAL_TO attempt to compare even with incompatible types, but - all other comparisons are invalid with incompatible types. A - ``value`` of type: - - - ``string`` can be compared against all other types - - ``boolean`` can only be compared against other booleans - - ``integer`` can be compared against doubles or a string if the - string value can be parsed as an integer. - - ``double`` can be compared against integers or a string if the - string can be parsed as a double. - - ``Timestamp`` can be compared against strings in RFC 3339 date - string format. - - ``TimeOfDay`` can be compared against timestamps and strings in - the format of 'HH:mm:ss'. - - If we fail to compare do to type mismatch, a warning will be given - and the condition will evaluate to false. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Field within the record this - condition is evaluated against. - operator (google.cloud.dlp_v2.types.RelationalOperator): - Required. Operator used to compare the field - or infoType to the value. - value (google.cloud.dlp_v2.types.Value): - Value to compare against. [Mandatory, except for ``EXISTS`` - tests.] 
- """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - operator: 'RelationalOperator' = proto.Field( - proto.ENUM, - number=3, - enum='RelationalOperator', - ) - value: 'Value' = proto.Field( - proto.MESSAGE, - number=4, - message='Value', - ) - - class Conditions(proto.Message): - r"""A collection of conditions. - - Attributes: - conditions (MutableSequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): - A collection of conditions. - """ - - conditions: MutableSequence['RecordCondition.Condition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='RecordCondition.Condition', - ) - - class Expressions(proto.Message): - r"""An expression, consisting of an operator and conditions. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): - The operator to apply to the result of conditions. Default - and currently only supported value is ``AND``. - conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): - Conditions to apply to the expression. - - This field is a member of `oneof`_ ``type``. - """ - class LogicalOperator(proto.Enum): - r"""Logical operators for conditional checks. - - Values: - LOGICAL_OPERATOR_UNSPECIFIED (0): - Unused - AND (1): - Conditional AND - """ - LOGICAL_OPERATOR_UNSPECIFIED = 0 - AND = 1 - - logical_operator: 'RecordCondition.Expressions.LogicalOperator' = proto.Field( - proto.ENUM, - number=1, - enum='RecordCondition.Expressions.LogicalOperator', - ) - conditions: 'RecordCondition.Conditions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='RecordCondition.Conditions', - ) - - expressions: Expressions = proto.Field( - proto.MESSAGE, - number=3, - message=Expressions, - ) - - -class TransformationOverview(proto.Message): - r"""Overview of the modifications that occurred. 
- - Attributes: - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - transformation_summaries (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary]): - Transformations applied to the dataset. - """ - - transformed_bytes: int = proto.Field( - proto.INT64, - number=2, - ) - transformation_summaries: MutableSequence['TransformationSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TransformationSummary', - ) - - -class TransformationSummary(proto.Message): - r"""Summary of a single transformation. Only one of 'transformation', - 'field_transformation', or 'record_suppress' will be set. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Set if the transformation was limited to a - specific InfoType. - field (google.cloud.dlp_v2.types.FieldId): - Set if the transformation was limited to a - specific FieldId. - transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - The specific transformation these stats apply - to. - field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): - The field transformation that was applied. - If multiple field transformations are requested - for a single field, this list will contain all - of them; otherwise, only one is supplied. - record_suppress (google.cloud.dlp_v2.types.RecordSuppression): - The specific suppression option these stats - apply to. - results (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): - Collection of all transformations that took - place or had an error. - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - """ - class TransformationResultCode(proto.Enum): - r"""Possible outcomes of transformations. - - Values: - TRANSFORMATION_RESULT_CODE_UNSPECIFIED (0): - Unused - SUCCESS (1): - Transformation completed without an error. - ERROR (2): - Transformation had an error. 
- """ - TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 - SUCCESS = 1 - ERROR = 2 - - class SummaryResult(proto.Message): - r"""A collection that informs the user the number of times a particular - ``TransformationResultCode`` and error details occurred. - - Attributes: - count (int): - Number of transformations counted by this - result. - code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): - Outcome of the transformation. - details (str): - A place for warnings or errors to show up if - a transformation didn't work as expected. - """ - - count: int = proto.Field( - proto.INT64, - number=1, - ) - code: 'TransformationSummary.TransformationResultCode' = proto.Field( - proto.ENUM, - number=2, - enum='TransformationSummary.TransformationResultCode', - ) - details: str = proto.Field( - proto.STRING, - number=3, - ) - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=3, - message='PrimitiveTransformation', - ) - field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldTransformation', - ) - record_suppress: 'RecordSuppression' = proto.Field( - proto.MESSAGE, - number=6, - message='RecordSuppression', - ) - results: MutableSequence[SummaryResult] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=SummaryResult, - ) - transformed_bytes: int = proto.Field( - proto.INT64, - number=7, - ) - - -class TransformationDescription(proto.Message): - r"""A flattened description of a ``PrimitiveTransformation`` or - ``RecordSuppression``. - - Attributes: - type_ (google.cloud.dlp_v2.types.TransformationType): - The transformation type. - description (str): - A description of the transformation. 
This is empty for a - RECORD_SUPPRESSION, or is the output of calling toString() - on the ``PrimitiveTransformation`` protocol buffer message - for any other type of transformation. - condition (str): - A human-readable string representation of the - ``RecordCondition`` corresponding to this transformation. - Set if a ``RecordCondition`` was used to determine whether - or not to apply this transformation. - - Examples: \* (age_field > 85) \* (age_field <= 18) \* - (zip_field exists) \* (zip_field == 01234) && (city_field != - "Springville") \* (zip_field == 01234) && (age_field <= 18) - && (city_field exists) - info_type (google.cloud.dlp_v2.types.InfoType): - Set if the transformation was limited to a specific - ``InfoType``. - """ - - type_: 'TransformationType' = proto.Field( - proto.ENUM, - number=1, - enum='TransformationType', - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - condition: str = proto.Field( - proto.STRING, - number=3, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=4, - message=storage.InfoType, - ) - - -class TransformationDetails(proto.Message): - r"""Details about a single transformation. This object contains a - description of the transformation, information about whether the - transformation was successfully applied, and the precise - location where the transformation occurred. These details are - stored in a user-specified BigQuery table. - - Attributes: - resource_name (str): - The name of the job that completed the - transformation. - container_name (str): - The top level name of the container where the - transformation is located (this will be the - source file name or table name). - transformation (MutableSequence[google.cloud.dlp_v2.types.TransformationDescription]): - Description of transformation. This would only contain more - than one element if there were multiple matching - transformations and which one to apply was ambiguous. 
Not - set for states that contain no transformation, currently - only state that contains no transformation is - TransformationResultStateType.METADATA_UNRETRIEVABLE. - status_details (google.cloud.dlp_v2.types.TransformationResultStatus): - Status of the transformation, if - transformation was not successful, this will - specify what caused it to fail, otherwise it - will show that the transformation was - successful. - transformed_bytes (int): - The number of bytes that were transformed. If - transformation was unsuccessful or did not take - place because there was no content to transform, - this will be zero. - transformation_location (google.cloud.dlp_v2.types.TransformationLocation): - The precise location of the transformed - content in the original container. - """ - - resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - container_name: str = proto.Field( - proto.STRING, - number=2, - ) - transformation: MutableSequence['TransformationDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TransformationDescription', - ) - status_details: 'TransformationResultStatus' = proto.Field( - proto.MESSAGE, - number=4, - message='TransformationResultStatus', - ) - transformed_bytes: int = proto.Field( - proto.INT64, - number=5, - ) - transformation_location: 'TransformationLocation' = proto.Field( - proto.MESSAGE, - number=6, - message='TransformationLocation', - ) - - -class TransformationLocation(proto.Message): - r"""Specifies the location of a transformation. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - finding_id (str): - For infotype transformations, link to the - corresponding findings ID so that location - information does not need to be duplicated. Each - findings ID correlates to an entry in the - findings output table, this table only gets - created when users specify to save findings (add - the save findings action to the request). - - This field is a member of `oneof`_ ``location_type``. - record_transformation (google.cloud.dlp_v2.types.RecordTransformation): - For record transformations, provide a field - and container information. - - This field is a member of `oneof`_ ``location_type``. - container_type (google.cloud.dlp_v2.types.TransformationContainerType): - Information about the functionality of the - container where this finding occurred, if - available. - """ - - finding_id: str = proto.Field( - proto.STRING, - number=1, - oneof='location_type', - ) - record_transformation: 'RecordTransformation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location_type', - message='RecordTransformation', - ) - container_type: 'TransformationContainerType' = proto.Field( - proto.ENUM, - number=3, - enum='TransformationContainerType', - ) - - -class RecordTransformation(proto.Message): - r""" - - Attributes: - field_id (google.cloud.dlp_v2.types.FieldId): - For record transformations, provide a field. - container_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Findings container modification timestamp, if - applicable. - container_version (str): - Container version, if available ("generation" - for Cloud Storage). 
- """ - - field_id: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - container_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - container_version: str = proto.Field( - proto.STRING, - number=3, - ) - - -class TransformationResultStatus(proto.Message): - r""" - - Attributes: - result_status_type (google.cloud.dlp_v2.types.TransformationResultStatusType): - Transformation result status type, this will - be either SUCCESS, or it will be the reason for - why the transformation was not completely - successful. - details (google.rpc.status_pb2.Status): - Detailed error codes and messages - """ - - result_status_type: 'TransformationResultStatusType' = proto.Field( - proto.ENUM, - number=1, - enum='TransformationResultStatusType', - ) - details: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -class TransformationDetailsStorageConfig(proto.Message): - r"""Config for storing transformation details. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - The BigQuery table in which to store the output. This may be - an existing table or in a new table in an existing dataset. - If table_id is not set a new one will be generated for you - with the following format: - dlp_googleapis_transformation_details_yyyy_mm_dd_[dlp_job_id]. - Pacific time zone will be used for generating the date - details. - - This field is a member of `oneof`_ ``type``. - """ - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.BigQueryTable, - ) - - -class Schedule(proto.Message): - r"""Schedule for inspect job triggers. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - recurrence_period_duration (google.protobuf.duration_pb2.Duration): - With this option a job is started on a - regular periodic basis. For example: every day - (86400 seconds). - A scheduled start time will be skipped if the - previous execution has not ended when its - scheduled time occurs. - This value must be set to a time duration - greater than or equal to 1 day and can be no - longer than 60 days. - - This field is a member of `oneof`_ ``option``. - """ - - recurrence_period_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - oneof='option', - message=duration_pb2.Duration, - ) - - -class Manual(proto.Message): - r"""Job trigger option for hybrid jobs. Jobs must be manually - created and finished. - - """ - - -class InspectTemplate(proto.Message): - r"""The inspectTemplate contains a configuration (set of types of - sensitive data to be detected) to be used anywhere you otherwise - would normally specify InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates to learn - more. - - Attributes: - name (str): - Output only. The template name. - - The template will have one of the following formats: - ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of an - inspectTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of an - inspectTemplate. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - The core content of the template. - Configuration of the scanning process. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='InspectConfig', - ) - - -class DeidentifyTemplate(proto.Message): - r"""DeidentifyTemplates contains instructions on how to - de-identify content. See - https://cloud.google.com/dlp/docs/concepts-templates to learn - more. - - Attributes: - name (str): - Output only. The template name. - - The template will have one of the following formats: - ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of an - inspectTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of an - inspectTemplate. - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - The core content of the template. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - deidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='DeidentifyConfig', - ) - - -class Error(proto.Message): - r"""Details information about an error encountered during job - execution or the results of an unsuccessful activation of the - JobTrigger. - - Attributes: - details (google.rpc.status_pb2.Status): - Detailed error codes and messages. - timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): - The times the error occurred. - """ - - details: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class JobTrigger(proto.Message): - r"""Contains a configuration to make dlp api calls on a repeating - basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers to learn - more. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Unique resource name for the triggeredJob, assigned by the - service when the triggeredJob is created, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - display_name (str): - Display name (max 100 chars) - description (str): - User provided description (max 256 chars) - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - For inspect jobs, a snapshot of the - configuration. - - This field is a member of `oneof`_ ``job``. 
- triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): - A list of triggers which will be OR'ed - together. Only one in the list needs to trigger - for a job to be started. The list may contain - only a single Schedule trigger and must have at - least one object. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Output only. A stream of errors encountered - when the trigger was activated. Repeated errors - may result in the JobTrigger automatically being - paused. Will return the last 100 errors. - Whenever the JobTrigger is modified this list - will be cleared. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of a - triggeredJob. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of a - triggeredJob. - last_run_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp of the last time - this trigger executed. - status (google.cloud.dlp_v2.types.JobTrigger.Status): - Required. A status for this trigger. - """ - class Status(proto.Enum): - r"""Whether the trigger is currently active. If PAUSED or - CANCELLED, no jobs will be created with this configuration. The - service may automatically pause triggers experiencing frequent - errors. To restart a job, set the status to HEALTHY after - correcting user errors. - - Values: - STATUS_UNSPECIFIED (0): - Unused. - HEALTHY (1): - Trigger is healthy. - PAUSED (2): - Trigger is temporarily paused. - CANCELLED (3): - Trigger is cancelled and can not be resumed. - """ - STATUS_UNSPECIFIED = 0 - HEALTHY = 1 - PAUSED = 2 - CANCELLED = 3 - - class Trigger(proto.Message): - r"""What event needs to occur for a new job to be started. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - schedule (google.cloud.dlp_v2.types.Schedule): - Create a job on a repeating basis based on - the elapse of time. - - This field is a member of `oneof`_ ``trigger``. - manual (google.cloud.dlp_v2.types.Manual): - For use with hybrid jobs. Jobs must be - manually created and finished. - - This field is a member of `oneof`_ ``trigger``. - """ - - schedule: 'Schedule' = proto.Field( - proto.MESSAGE, - number=1, - oneof='trigger', - message='Schedule', - ) - manual: 'Manual' = proto.Field( - proto.MESSAGE, - number=2, - oneof='trigger', - message='Manual', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - inspect_job: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='job', - message='InspectJobConfig', - ) - triggers: MutableSequence[Trigger] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=Trigger, - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='Error', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - last_run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - status: Status = proto.Field( - proto.ENUM, - number=10, - enum=Status, - ) - - -class Action(proto.Message): - r"""A task to execute on the completion of a job. - See https://cloud.google.com/dlp/docs/concepts-actions to learn - more. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): - Save resulting findings in a provided - location. - - This field is a member of `oneof`_ ``action``. - pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub): - Publish a notification to a Pub/Sub topic. - - This field is a member of `oneof`_ ``action``. - publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc): - Publish summary to Cloud Security Command - Center (Alpha). - - This field is a member of `oneof`_ ``action``. - publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog): - Publish findings to Cloud Datahub. - - This field is a member of `oneof`_ ``action``. - deidentify (google.cloud.dlp_v2.types.Action.Deidentify): - Create a de-identified copy of the input - data. - - This field is a member of `oneof`_ ``action``. - job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails): - Sends an email when the job completes. The email goes to IAM - project owners and technical `Essential - Contacts `__. - - This field is a member of `oneof`_ ``action``. - publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver): - Enable Stackdriver metric dlp.googleapis.com/finding_count. - - This field is a member of `oneof`_ ``action``. - """ - - class SaveFindings(proto.Message): - r"""If set, the detailed findings will be persisted to the - specified OutputStorageConfig. Only a single instance of this - action can be specified. - Compatible with: Inspect, Risk - - Attributes: - output_config (google.cloud.dlp_v2.types.OutputStorageConfig): - Location to store findings outside of DLP. 
- """ - - output_config: 'OutputStorageConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='OutputStorageConfig', - ) - - class PublishToPubSub(proto.Message): - r"""Publish a message into a given Pub/Sub topic when DlpJob has - completed. The message contains a single field, ``DlpJobName``, - which is equal to the finished job's - ```DlpJob.name`` `__. - Compatible with: Inspect, Risk - - Attributes: - topic (str): - Cloud Pub/Sub topic to send notifications to. - The topic must have given publishing access - rights to the DLP API service account executing - the long running DlpJob sending the - notifications. Format is - projects/{project}/topics/{topic}. - """ - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - - class PublishSummaryToCscc(proto.Message): - r"""Publish the result summary of a DlpJob to the Cloud Security - Command Center (CSCC Alpha). - This action is only available for projects which are parts of an - organization and whitelisted for the alpha Cloud Security - Command Center. - The action will publish the count of finding instances and their - info types. The summary of findings will be persisted in CSCC - and are governed by CSCC service-specific policy, see - https://cloud.google.com/terms/service-terms Only a single - instance of this action can be specified. Compatible with: - Inspect - - """ - - class PublishFindingsToCloudDataCatalog(proto.Message): - r"""Publish findings of a DlpJob to Data Catalog. In Data Catalog, tag - templates are applied to the resource that Cloud DLP scanned. Data - Catalog tag templates are stored in the same project and region - where the BigQuery table exists. For Cloud DLP to create and apply - the tag template, the Cloud DLP service agent must have the - ``roles/datacatalog.tagTemplateOwner`` permission on the project. - The tag template contains fields summarizing the results of the - DlpJob. Any field values previously written by another DlpJob are - deleted. 
[InfoType naming patterns][google.privacy.dlp.v2.InfoType] - are strictly enforced when using this feature. - - Findings are persisted in Data Catalog storage and are governed by - service-specific policies for Data Catalog. For more information, - see `Service Specific - Terms `__. - - Only a single instance of this action can be specified. This action - is allowed only if all resources being scanned are BigQuery tables. - Compatible with: Inspect - - """ - - class Deidentify(proto.Message): - r"""Create a de-identified copy of the requested table or files. - - A TransformationDetail will be created for each transformation. - - If any rows in BigQuery are skipped during de-identification - (transformation errors or row size exceeds BigQuery insert API - limits) they are placed in the failure output table. If the original - row exceeds the BigQuery insert API limit it will be truncated when - written to the failure output table. The failure output table can be - set in the - action.deidentify.output.big_query_output.deidentified_failure_output_table - field, if no table is set, a table will be automatically created in - the same project and dataset as the original table. - - Compatible with: Inspect - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transformation_config (google.cloud.dlp_v2.types.TransformationConfig): - User specified deidentify templates and - configs for structured, unstructured, and image - files. - transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): - Config for storing transformation details. This is separate - from the de-identified content, and contains metadata about - the successful transformations and/or failures that occurred - while de-identifying. 
This needs to be set in order for - users to access information about the status of each - transformation (see - [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] - message for more information about what is noted). - cloud_storage_output (str): - Required. User settable Cloud Storage bucket - and folders to store de-identified files. This - field must be set for cloud storage - deidentification. The output Cloud Storage - bucket must be different from the input bucket. - De-identified files will overwrite files in the - output path. - Form of: gs://bucket/folder/ or gs://bucket - - This field is a member of `oneof`_ ``output``. - file_types_to_transform (MutableSequence[google.cloud.dlp_v2.types.FileType]): - List of user-specified file type groups to transform. If - specified, only the files with these filetypes will be - transformed. If empty, all supported files will be - transformed. Supported types may be automatically added over - time. If a file type is set in this field that isn't - supported by the Deidentify action then the job will fail - and will not be successfully created/started. Currently the - only filetypes supported are: IMAGES, TEXT_FILES, CSV, TSV. - """ - - transformation_config: 'TransformationConfig' = proto.Field( - proto.MESSAGE, - number=7, - message='TransformationConfig', - ) - transformation_details_storage_config: 'TransformationDetailsStorageConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='TransformationDetailsStorageConfig', - ) - cloud_storage_output: str = proto.Field( - proto.STRING, - number=9, - oneof='output', - ) - file_types_to_transform: MutableSequence[storage.FileType] = proto.RepeatedField( - proto.ENUM, - number=8, - enum=storage.FileType, - ) - - class JobNotificationEmails(proto.Message): - r"""Sends an email when the job completes. The email goes to IAM project - owners and technical `Essential - Contacts `__. 
- - """ - - class PublishToStackdriver(proto.Message): - r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This - will publish a metric to stack driver on each infotype requested and - how many findings were found for it. CustomDetectors will be - bucketed as 'Custom' under the Stackdriver label 'info_type'. - - """ - - save_findings: SaveFindings = proto.Field( - proto.MESSAGE, - number=1, - oneof='action', - message=SaveFindings, - ) - pub_sub: PublishToPubSub = proto.Field( - proto.MESSAGE, - number=2, - oneof='action', - message=PublishToPubSub, - ) - publish_summary_to_cscc: PublishSummaryToCscc = proto.Field( - proto.MESSAGE, - number=3, - oneof='action', - message=PublishSummaryToCscc, - ) - publish_findings_to_cloud_data_catalog: PublishFindingsToCloudDataCatalog = proto.Field( - proto.MESSAGE, - number=5, - oneof='action', - message=PublishFindingsToCloudDataCatalog, - ) - deidentify: Deidentify = proto.Field( - proto.MESSAGE, - number=7, - oneof='action', - message=Deidentify, - ) - job_notification_emails: JobNotificationEmails = proto.Field( - proto.MESSAGE, - number=8, - oneof='action', - message=JobNotificationEmails, - ) - publish_to_stackdriver: PublishToStackdriver = proto.Field( - proto.MESSAGE, - number=9, - oneof='action', - message=PublishToStackdriver, - ) - - -class TransformationConfig(proto.Message): - r"""User specified templates and configs for how to deidentify - structured, unstructures, and image files. User must provide - either a unstructured deidentify template or at least one redact - image config. - - Attributes: - deidentify_template (str): - De-identify template. If this template is specified, it will - serve as the default de-identify template. This template - cannot contain ``record_transformations`` since it can be - used for unstructured content such as free-form text files. - If this template is not set, a default - ``ReplaceWithInfoTypeConfig`` will be used to de-identify - unstructured content. 
- structured_deidentify_template (str): - Structured de-identify template. If this template is - specified, it will serve as the de-identify template for - structured content such as delimited files and tables. If - this template is not set but the ``deidentify_template`` is - set, then ``deidentify_template`` will also apply to the - structured content. If neither template is set, a default - ``ReplaceWithInfoTypeConfig`` will be used to de-identify - structured content. - image_redact_template (str): - Image redact template. - If this template is specified, it will serve as - the de-identify template for images. If this - template is not set, all findings in the image - will be redacted with a black box. - """ - - deidentify_template: str = proto.Field( - proto.STRING, - number=1, - ) - structured_deidentify_template: str = proto.Field( - proto.STRING, - number=2, - ) - image_redact_template: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CreateInspectTemplateRequest(proto.Message): - r"""Request message for CreateInspectTemplate. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - Required. The InspectTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - template_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateInspectTemplateRequest(proto.Message): - r"""Request message for UpdateInspectTemplate. - - Attributes: - name (str): - Required. Resource name of organization and inspectTemplate - to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - New InspectTemplate value. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetInspectTemplateRequest(proto.Message): - r"""Request message for GetInspectTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListInspectTemplatesRequest(proto.Message): - r"""Request message for ListInspectTemplates. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListInspectTemplates``. - page_size (int): - Size of the page, can be limited by the - server. 
If zero server returns a page of max - size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the template was - created. - - ``update_time``: corresponds to the time the template was - last updated. - - ``name``: corresponds to the template's name. - - ``display_name``: corresponds to the template's display - name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListInspectTemplatesResponse(proto.Message): - r"""Response message for ListInspectTemplates. - - Attributes: - inspect_templates (MutableSequence[google.cloud.dlp_v2.types.InspectTemplate]): - List of inspectTemplates, up to page_size in - ListInspectTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListInspectTemplates request. - """ - - @property - def raw_page(self): - return self - - inspect_templates: MutableSequence['InspectTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteInspectTemplateRequest(proto.Message): - r"""Request message for DeleteInspectTemplate. - - Attributes: - name (str): - Required. 
Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateJobTriggerRequest(proto.Message): - r"""Request message for CreateJobTrigger. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. - trigger_id (str): - The trigger id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - job_trigger: 'JobTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - trigger_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ActivateJobTriggerRequest(proto.Message): - r"""Request message for ActivateJobTrigger. - - Attributes: - name (str): - Required. Resource name of the trigger to activate, for - example ``projects/dlp-test-project/jobTriggers/53234423``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateJobTriggerRequest(proto.Message): - r"""Request message for UpdateJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - job_trigger: 'JobTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetJobTriggerRequest(proto.Message): - r"""Request message for GetJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDlpJobRequest(proto.Message): - r"""Request message for CreateDlpJobRequest. Used to initiate - long running jobs such as calculating risk metrics or inspecting - Google Cloud Storage. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - An inspection job scans a storage repository - for InfoTypes. - - This field is a member of `oneof`_ ``job``. - risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - A risk analysis job calculates - re-identification risk metrics for a BigQuery - table. - - This field is a member of `oneof`_ ``job``. - job_id (str): - The job id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_job: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='job', - message='InspectJobConfig', - ) - risk_job: 'RiskAnalysisJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='job', - message='RiskAnalysisJobConfig', - ) - job_id: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListJobTriggersRequest(proto.Message): - r"""Request message for ListJobTriggers. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ListJobTriggers. ``order_by`` field must not change for - subsequent calls. - page_size (int): - Size of the page, can be limited by a server. - order_by (str): - Comma separated list of triggeredJob fields to order by, - followed by ``asc`` or ``desc`` postfix. This list is - case-insensitive, default sorting order is ascending, - redundant space characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the JobTrigger - was created. - - ``update_time``: corresponds to the time the JobTrigger - was last updated. - - ``last_run_time``: corresponds to the last time the - JobTrigger ran. - - ``name``: corresponds to the JobTrigger's name. - - ``display_name``: corresponds to the JobTrigger's display - name. - - ``status``: corresponds to JobTrigger's status. - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. 
- - Supported fields/values for inspect triggers: - - - ``status`` - HEALTHY|PAUSED|CANCELLED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - 'last_run_time\` - RFC 3339 formatted timestamp, - surrounded by quotation marks. Nanoseconds are - ignored. - - 'error_count' - Number of errors that have occurred - while running. - - - The operator must be ``=`` or ``!=`` for status and - inspected_storage. - - Examples: - - - inspected_storage = cloud_storage AND status = HEALTHY - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = PAUSED OR - state = HEALTHY) - - last_run_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of jobs. Will use ``DlpJobType.INSPECT`` if not - set. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=6, - enum='DlpJobType', - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListJobTriggersResponse(proto.Message): - r"""Response message for ListJobTriggers. - - Attributes: - job_triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger]): - List of triggeredJobs, up to page_size in - ListJobTriggersRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListJobTriggers request. 
- """ - - @property - def raw_page(self): - return self - - job_triggers: MutableSequence['JobTrigger'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='JobTrigger', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteJobTriggerRequest(proto.Message): - r"""Request message for DeleteJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class InspectJobConfig(proto.Message): - r"""Controls what and how to inspect for findings. - - Attributes: - storage_config (google.cloud.dlp_v2.types.StorageConfig): - The data to scan. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - How and what to scan for. - inspect_template_name (str): - If provided, will be used as the default for all values in - InspectConfig. ``inspect_config`` will be merged into the - values persisted as part of the template. - actions (MutableSequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. - """ - - storage_config: storage.StorageConfig = proto.Field( - proto.MESSAGE, - number=1, - message=storage.StorageConfig, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=3, - ) - actions: MutableSequence['Action'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Action', - ) - - -class DataProfileAction(proto.Message): - r"""A task to execute when a data profile has been generated. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - export_data (google.cloud.dlp_v2.types.DataProfileAction.Export): - Export data profiles into a provided - location. - - This field is a member of `oneof`_ ``action``. - pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): - Publish a message into the Pub/Sub topic. - - This field is a member of `oneof`_ ``action``. - """ - class EventType(proto.Enum): - r"""Types of event that can trigger an action. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - Unused. - NEW_PROFILE (1): - New profile (not a re-profile). - CHANGED_PROFILE (2): - Changed one of the following profile metrics: - - - Table data risk score - - Table sensitivity score - - Table resource visibility - - Table encryption type - - Table predicted infoTypes - - Table other infoTypes - SCORE_INCREASED (3): - Table data risk score or sensitivity score - increased. - ERROR_CHANGED (4): - A user (non-internal) error occurred. - """ - EVENT_TYPE_UNSPECIFIED = 0 - NEW_PROFILE = 1 - CHANGED_PROFILE = 2 - SCORE_INCREASED = 3 - ERROR_CHANGED = 4 - - class Export(proto.Message): - r"""If set, the detailed data profiles will be persisted to the - location of your choice whenever updated. - - Attributes: - profile_table (google.cloud.dlp_v2.types.BigQueryTable): - Store all table and column profiles in an - existing table or a new table in an existing - dataset. Each re-generation will result in a new - row in BigQuery. - """ - - profile_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - message=storage.BigQueryTable, - ) - - class PubSubNotification(proto.Message): - r"""Send a Pub/Sub message into the given Pub/Sub topic to connect other - systems to data profile generation. The message payload data will be - the byte serialization of ``DataProfilePubSubMessage``. - - Attributes: - topic (str): - Cloud Pub/Sub topic to send notifications to. 
- Format is projects/{project}/topics/{topic}. - event (google.cloud.dlp_v2.types.DataProfileAction.EventType): - The type of event that triggers a Pub/Sub. At most one - ``PubSubNotification`` per EventType is permitted. - pubsub_condition (google.cloud.dlp_v2.types.DataProfilePubSubCondition): - Conditions (e.g., data risk or sensitivity - level) for triggering a Pub/Sub. - detail_of_message (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification.DetailLevel): - How much data to include in the Pub/Sub message. If the user - wishes to limit the size of the message, they can use - resource_name and fetch the profile fields they wish to. Per - table profile (not per column). - """ - class DetailLevel(proto.Enum): - r"""The levels of detail that can be included in the Pub/Sub - message. - - Values: - DETAIL_LEVEL_UNSPECIFIED (0): - Unused. - TABLE_PROFILE (1): - The full table data profile. - RESOURCE_NAME (2): - The resource name of the table. - """ - DETAIL_LEVEL_UNSPECIFIED = 0 - TABLE_PROFILE = 1 - RESOURCE_NAME = 2 - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - event: 'DataProfileAction.EventType' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileAction.EventType', - ) - pubsub_condition: 'DataProfilePubSubCondition' = proto.Field( - proto.MESSAGE, - number=3, - message='DataProfilePubSubCondition', - ) - detail_of_message: 'DataProfileAction.PubSubNotification.DetailLevel' = proto.Field( - proto.ENUM, - number=4, - enum='DataProfileAction.PubSubNotification.DetailLevel', - ) - - export_data: Export = proto.Field( - proto.MESSAGE, - number=1, - oneof='action', - message=Export, - ) - pub_sub_notification: PubSubNotification = proto.Field( - proto.MESSAGE, - number=2, - oneof='action', - message=PubSubNotification, - ) - - -class DataProfileJobConfig(proto.Message): - r"""Configuration for setting up a job to scan resources for profile - generation. 
Only one data profile configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to the [data - retention policy] - (https://cloud.google.com/dlp/docs/data-profiles#retention). - - Attributes: - location (google.cloud.dlp_v2.types.DataProfileLocation): - The data to scan. - project_id (str): - The project that will run the scan. The DLP - service account that exists within this project - must have access to all resources that are - profiled, and the Cloud DLP API must be enabled. - inspect_templates (MutableSequence[str]): - Detection logic for profile generation. - - Not all template features are used by profiles. - FindingLimits, include_quote and exclude_info_types have no - impact on data profiling. - - Multiple templates may be provided if there is data in - multiple regions. At most one template must be specified - per-region (including "global"). Each region is scanned - using the applicable template. If no region-specific - template is specified, but a "global" template is specified, - it will be copied to that region and used instead. If no - global or region-specific template is provided for a region - with data, that region's data will not be scanned. - - For more information, see - https://cloud.google.com/dlp/docs/data-profiles#data_residency. - data_profile_actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): - Actions to execute at the completion of the - job. - """ - - location: 'DataProfileLocation' = proto.Field( - proto.MESSAGE, - number=1, - message='DataProfileLocation', - ) - project_id: str = proto.Field( - proto.STRING, - number=5, - ) - inspect_templates: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - data_profile_actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='DataProfileAction', - ) - - -class DataProfileLocation(proto.Message): - r"""The data that will be profiled. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - organization_id (int): - The ID of an organization to scan. - - This field is a member of `oneof`_ ``location``. - folder_id (int): - The ID of the Folder within an organization - to scan. - - This field is a member of `oneof`_ ``location``. - """ - - organization_id: int = proto.Field( - proto.INT64, - number=1, - oneof='location', - ) - folder_id: int = proto.Field( - proto.INT64, - number=2, - oneof='location', - ) - - -class DlpJob(proto.Message): - r"""Combines all of the information about a DLP job. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The server-assigned name. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. - state (google.cloud.dlp_v2.types.DlpJob.JobState): - State of a job. - risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): - Results from analyzing risk of a data source. - - This field is a member of `oneof`_ ``details``. - inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): - Results from inspecting a data source. - - This field is a member of `oneof`_ ``details``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job finished. 
- job_trigger_name (str): - If created by a job trigger, the resource - name of the trigger that instantiated the job. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - A stream of errors encountered running the - job. - """ - class JobState(proto.Enum): - r"""Possible states of a job. New items may be added. - - Values: - JOB_STATE_UNSPECIFIED (0): - Unused. - PENDING (1): - The job has not yet started. - RUNNING (2): - The job is currently running. Once a job has - finished it will transition to FAILED or DONE. - DONE (3): - The job is no longer running. - CANCELED (4): - The job was canceled before it could be - completed. - FAILED (5): - The job had an error and did not complete. - ACTIVE (6): - The job is currently accepting findings via - hybridInspect. A hybrid job in ACTIVE state may - continue to have findings added to it through - the calling of hybridInspect. After the job has - finished no more calls to hybridInspect may be - made. ACTIVE jobs can transition to DONE. 
- """ - JOB_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - CANCELED = 4 - FAILED = 5 - ACTIVE = 6 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=2, - enum='DlpJobType', - ) - state: JobState = proto.Field( - proto.ENUM, - number=3, - enum=JobState, - ) - risk_details: 'AnalyzeDataSourceRiskDetails' = proto.Field( - proto.MESSAGE, - number=4, - oneof='details', - message='AnalyzeDataSourceRiskDetails', - ) - inspect_details: 'InspectDataSourceDetails' = proto.Field( - proto.MESSAGE, - number=5, - oneof='details', - message='InspectDataSourceDetails', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - job_trigger_name: str = proto.Field( - proto.STRING, - number=10, - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='Error', - ) - - -class GetDlpJobRequest(proto.Message): - r"""The request message for [DlpJobs.GetDlpJob][]. - - Attributes: - name (str): - Required. The name of the DlpJob resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDlpJobsRequest(proto.Message): - r"""The request message for listing DLP jobs. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - - - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - ``trigger_name`` - The name of the trigger that - created the job. - - 'end_time\` - Corresponds to the time the job - finished. - - 'start_time\` - Corresponds to the time the job - finished. - - - Supported fields for risk analysis jobs: - - - ``state`` - RUNNING|CANCELED|FINISHED|FAILED - - 'end_time\` - Corresponds to the time the job - finished. - - 'start_time\` - Corresponds to the time the job - finished. - - - The operator must be ``=`` or ``!=``. - - Examples: - - - inspected_storage = cloud_storage AND state = done - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = done OR - state = canceled) - - end_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. - page_size (int): - The standard list page size. - page_token (str): - The standard list page token. 
- type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. Defaults to ``DlpJobType.INSPECT`` - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc, end_time asc, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the job was - created. - - ``end_time``: corresponds to the time the job ended. - - ``name``: corresponds to the job's name. - - ``state``: corresponds to ``state`` - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=5, - enum='DlpJobType', - ) - order_by: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListDlpJobsResponse(proto.Message): - r"""The response message for listing DLP jobs. - - Attributes: - jobs (MutableSequence[google.cloud.dlp_v2.types.DlpJob]): - A list of DlpJobs that matches the specified - filter in the request. - next_page_token (str): - The standard List next-page token. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence['DlpJob'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DlpJob', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelDlpJobRequest(proto.Message): - r"""The request message for canceling a DLP job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be cancelled. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FinishDlpJobRequest(proto.Message): - r"""The request message for finishing a DLP hybrid job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be cancelled. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteDlpJobRequest(proto.Message): - r"""The request message for deleting a DLP job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDeidentifyTemplateRequest(proto.Message): - r"""Request message for CreateDeidentifyTemplate. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Required. The DeidentifyTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. 
This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - template_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateDeidentifyTemplateRequest(proto.Message): - r"""Request message for UpdateDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetDeidentifyTemplateRequest(proto.Message): - r"""Request message for GetDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDeidentifyTemplatesRequest(proto.Message): - r"""Request message for ListDeidentifyTemplates. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListDeidentifyTemplates``. - page_size (int): - Size of the page, can be limited by the - server. If zero server returns a page of max - size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the template was - created. - - ``update_time``: corresponds to the time the template was - last updated. - - ``name``: corresponds to the template's name. - - ``display_name``: corresponds to the template's display - name. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDeidentifyTemplatesResponse(proto.Message): - r"""Response message for ListDeidentifyTemplates. - - Attributes: - deidentify_templates (MutableSequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): - List of deidentify templates, up to page_size in - ListDeidentifyTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListDeidentifyTemplates request. - """ - - @property - def raw_page(self): - return self - - deidentify_templates: MutableSequence['DeidentifyTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DeidentifyTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteDeidentifyTemplateRequest(proto.Message): - r"""Request message for DeleteDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class LargeCustomDictionaryConfig(proto.Message): - r"""Configuration for a custom dictionary created from a data source of - any size up to the maximum size defined in the - `limits `__ page. The artifacts - of dictionary creation are stored in the specified Cloud Storage - location. Consider using ``CustomInfoType.Dictionary`` for smaller - dictionaries that satisfy the size requirements. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - output_path (google.cloud.dlp_v2.types.CloudStoragePath): - Location to store dictionary artifacts in - Cloud Storage. These files will only be - accessible by project owners and the DLP API. If - any of these artifacts are modified, the - dictionary is considered invalid and can no - longer be used. - cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): - Set of files containing newline-delimited - lists of dictionary phrases. - - This field is a member of `oneof`_ ``source``. - big_query_field (google.cloud.dlp_v2.types.BigQueryField): - Field in a BigQuery table where each cell - represents a dictionary phrase. - - This field is a member of `oneof`_ ``source``. - """ - - output_path: storage.CloudStoragePath = proto.Field( - proto.MESSAGE, - number=1, - message=storage.CloudStoragePath, - ) - cloud_storage_file_set: storage.CloudStorageFileSet = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message=storage.CloudStorageFileSet, - ) - big_query_field: storage.BigQueryField = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message=storage.BigQueryField, - ) - - -class LargeCustomDictionaryStats(proto.Message): - r"""Summary statistics of a custom dictionary. - - Attributes: - approx_num_phrases (int): - Approximate number of distinct phrases in the - dictionary. - """ - - approx_num_phrases: int = proto.Field( - proto.INT64, - number=1, - ) - - -class StoredInfoTypeConfig(proto.Message): - r"""Configuration for stored infoTypes. All fields and subfield - are provided by the user. For more information, see - https://cloud.google.com/dlp/docs/creating-custom-infotypes. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - display_name (str): - Display name of the StoredInfoType (max 256 - characters). - description (str): - Description of the StoredInfoType (max 256 - characters). - large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): - StoredInfoType where findings are defined by - a dictionary of phrases. - - This field is a member of `oneof`_ ``type``. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - Store dictionary-based CustomInfoType. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Store regular expression-based - StoredInfoType. - - This field is a member of `oneof`_ ``type``. - """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - large_custom_dictionary: 'LargeCustomDictionaryConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='LargeCustomDictionaryConfig', - ) - dictionary: storage.CustomInfoType.Dictionary = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - - -class StoredInfoTypeStats(proto.Message): - r"""Statistics for a StoredInfoType. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): - StoredInfoType where findings are defined by - a dictionary of phrases. - - This field is a member of `oneof`_ ``type``. 
- """ - - large_custom_dictionary: 'LargeCustomDictionaryStats' = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='LargeCustomDictionaryStats', - ) - - -class StoredInfoTypeVersion(proto.Message): - r"""Version of a StoredInfoType, including the configuration used - to build it, create timestamp, and current state. - - Attributes: - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - StoredInfoType configuration. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Create timestamp of the version. Read-only, - determined by the system when the version is - created. - state (google.cloud.dlp_v2.types.StoredInfoTypeState): - Stored info type version state. Read-only, - updated by the system during dictionary - creation. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Errors that occurred when creating this storedInfoType - version, or anomalies detected in the storedInfoType data - that render it unusable. Only the five most recent errors - will be displayed, with the most recent error appearing - first. - - For example, some of the data for stored custom dictionaries - is put in the user's Cloud Storage bucket, and if this data - is modified or deleted by the user or another system, the - dictionary becomes invalid. - - If any errors occur, fix the problem indicated by the error - message and use the UpdateStoredInfoType API method to - create another version of the storedInfoType to continue - using it, reusing the same ``config`` if it was not the - source of the error. - stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): - Statistics about this storedInfoType version. 
- """ - - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='StoredInfoTypeConfig', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - state: 'StoredInfoTypeState' = proto.Field( - proto.ENUM, - number=3, - enum='StoredInfoTypeState', - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Error', - ) - stats: 'StoredInfoTypeStats' = proto.Field( - proto.MESSAGE, - number=5, - message='StoredInfoTypeStats', - ) - - -class StoredInfoType(proto.Message): - r"""StoredInfoType resource message that contains information - about the current version and any pending updates. - - Attributes: - name (str): - Resource name. - current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): - Current version of the stored info type. - pending_versions (MutableSequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): - Pending versions of the stored info type. - Empty if no versions are pending. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - current_version: 'StoredInfoTypeVersion' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeVersion', - ) - pending_versions: MutableSequence['StoredInfoTypeVersion'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StoredInfoTypeVersion', - ) - - -class CreateStoredInfoTypeRequest(proto.Message): - r"""Request message for CreateStoredInfoType. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the storedInfoType - to create. - stored_info_type_id (str): - The storedInfoType ID can contain uppercase and lowercase - letters, numbers, and hyphens; that is, it must match the - regular expression: ``[a-zA-Z\d-_]+``. The maximum length is - 100 characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - stored_info_type_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateStoredInfoTypeRequest(proto.Message): - r"""Request message for UpdateStoredInfoType. - - Attributes: - name (str): - Required. Resource name of organization and storedInfoType - to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. 
- config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the storedInfoType. - If not provided, a new version of the - storedInfoType will be created with the existing - configuration. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetStoredInfoTypeRequest(proto.Message): - r"""Request message for GetStoredInfoType. - - Attributes: - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListStoredInfoTypesRequest(proto.Message): - r"""Request message for ListStoredInfoTypes. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListStoredInfoTypes``. - page_size (int): - Size of the page, can be limited by the - server. 
If zero server returns a page of max - size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc, display_name, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the most recent - version of the resource was created. - - ``state``: corresponds to the state of the resource. - - ``name``: corresponds to resource name. - - ``display_name``: corresponds to info type's display - name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListStoredInfoTypesResponse(proto.Message): - r"""Response message for ListStoredInfoTypes. - - Attributes: - stored_info_types (MutableSequence[google.cloud.dlp_v2.types.StoredInfoType]): - List of storedInfoTypes, up to page_size in - ListStoredInfoTypesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListStoredInfoTypes request. - """ - - @property - def raw_page(self): - return self - - stored_info_types: MutableSequence['StoredInfoType'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='StoredInfoType', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteStoredInfoTypeRequest(proto.Message): - r"""Request message for DeleteStoredInfoType. - - Attributes: - name (str): - Required. 
Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class HybridInspectJobTriggerRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the trigger to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item: 'HybridContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridInspectDlpJobRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item: 'HybridContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridContentItem(proto.Message): - r"""An individual hybrid item to inspect. Will be stored - temporarily during processing. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The item to inspect. - finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): - Supplementary information that will be added - to each finding. 
- """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - finding_details: 'HybridFindingDetails' = proto.Field( - proto.MESSAGE, - number=2, - message='HybridFindingDetails', - ) - - -class HybridFindingDetails(proto.Message): - r"""Populate to associate additional data with each finding. - - Attributes: - container_details (google.cloud.dlp_v2.types.Container): - Details about the container where the content - being inspected is from. - file_offset (int): - Offset in bytes of the line, from the - beginning of the file, where the finding is - located. Populate if the item being scanned is - only part of a bigger item, such as a shard of a - file and you want to track the absolute position - of the finding. - row_offset (int): - Offset of the row for tables. Populate if the - row(s) being scanned are part of a bigger - dataset and you want to keep track of their - absolute position. - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional information to make - findings meaningful such as the columns that are primary - keys. If not known ahead of time, can also be set within - each inspect hybrid call and the two will be merged. Note - that identifying_fields will only be stored to BigQuery, and - only if the BigQuery action has been included. - labels (MutableMapping[str, str]): - Labels to represent user provided metadata about the data - being inspected. If configured by the job, some key values - may be required. The labels associated with ``Finding``'s - produced by hybrid inspection. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. 
- - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - """ - - container_details: 'Container' = proto.Field( - proto.MESSAGE, - number=1, - message='Container', - ) - file_offset: int = proto.Field( - proto.INT64, - number=2, - ) - row_offset: int = proto.Field( - proto.INT64, - number=3, - ) - table_options: storage.TableOptions = proto.Field( - proto.MESSAGE, - number=4, - message=storage.TableOptions, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -class HybridInspectResponse(proto.Message): - r"""Quota exceeded errors will be thrown once quota has been met. - """ - - -class DataRiskLevel(proto.Message): - r"""Score is a summary of all elements in the data profile. - A higher number means more risk. - - Attributes: - score (google.cloud.dlp_v2.types.DataRiskLevel.DataRiskLevelScore): - The score applied to the resource. - """ - class DataRiskLevelScore(proto.Enum): - r"""Various score levels for resources. - - Values: - RISK_SCORE_UNSPECIFIED (0): - Unused. - RISK_LOW (10): - Low risk - Lower indication of sensitive data - that appears to have additional access - restrictions in place or no indication of - sensitive data found. - RISK_MODERATE (20): - Medium risk - Sensitive data may be present - but additional access or fine grain access - restrictions appear to be present. Consider - limiting access even further or transform data - to mask. - RISK_HIGH (30): - High risk – SPII may be present. Access - controls may include public ACLs. Exfiltration - of data may lead to user data loss. - Re-identification of users may be possible. - Consider limiting usage and or removing SPII. 
- """ - RISK_SCORE_UNSPECIFIED = 0 - RISK_LOW = 10 - RISK_MODERATE = 20 - RISK_HIGH = 30 - - score: DataRiskLevelScore = proto.Field( - proto.ENUM, - number=1, - enum=DataRiskLevelScore, - ) - - -class DataProfileConfigSnapshot(proto.Message): - r"""Snapshot of the configurations used to generate the profile. - - Attributes: - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - A copy of the inspection config used to generate this - profile. This is a copy of the inspect_template specified in - ``DataProfileJobConfig``. - data_profile_job (google.cloud.dlp_v2.types.DataProfileJobConfig): - A copy of the configuration used to generate - this profile. - """ - - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - data_profile_job: 'DataProfileJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='DataProfileJobConfig', - ) - - -class TableDataProfile(proto.Message): - r"""The profile for a scanned table. - - Attributes: - name (str): - The name of the profile. - project_data_profile (str): - The resource name to the project data profile - for this table. - dataset_project_id (str): - The GCP project ID that owns the BigQuery - dataset. - dataset_location (str): - The BigQuery location where the dataset's - data is stored. See - https://cloud.google.com/bigquery/docs/locations - for supported locations. - dataset_id (str): - The BigQuery dataset ID. - table_id (str): - The BigQuery table ID. - full_resource (str): - The resource name of the table. - https://cloud.google.com/apis/design/resource_names#full_resource_name - profile_status (google.cloud.dlp_v2.types.ProfileStatus): - Success or error status from the most recent - profile generation attempt. May be empty if the - profile is still being generated. - state (google.cloud.dlp_v2.types.TableDataProfile.State): - State of a profile. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - The sensitivity score of this table. 
- data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): - The data risk level of this table. - predicted_info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSummary]): - The infoTypes predicted from this table's - data. - other_info_types (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): - Other infoTypes found in this table's data. - config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): - The snapshot of the configurations used to - generate the profile. - last_modified_time (google.protobuf.timestamp_pb2.Timestamp): - The time when this table was last modified - expiration_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when this table expires. - scanned_column_count (int): - The number of columns profiled in the table. - failed_column_count (int): - The number of columns skipped in the table - because of an error. - table_size_bytes (int): - The size of the table when the profile was - generated. - row_count (int): - Number of rows in the table when the profile - was generated. This will not be populated for - BigLake tables. - encryption_status (google.cloud.dlp_v2.types.EncryptionStatus): - How the table is encrypted. - resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): - How broadly a resource has been shared. - profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): - The last time the profile was generated. - resource_labels (MutableMapping[str, str]): - The labels applied to the resource at the - time the profile was generated. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the table was created. - """ - class State(proto.Enum): - r"""Possible states of a profile. New items may be added. - - Values: - STATE_UNSPECIFIED (0): - Unused. - RUNNING (1): - The profile is currently running. Once a - profile has finished it will transition to DONE. - DONE (2): - The profile is no longer generating. 
If - profile_status.status.code is 0, the profile succeeded, - otherwise, it failed. - """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - DONE = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - project_data_profile: str = proto.Field( - proto.STRING, - number=2, - ) - dataset_project_id: str = proto.Field( - proto.STRING, - number=24, - ) - dataset_location: str = proto.Field( - proto.STRING, - number=29, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=25, - ) - table_id: str = proto.Field( - proto.STRING, - number=26, - ) - full_resource: str = proto.Field( - proto.STRING, - number=3, - ) - profile_status: 'ProfileStatus' = proto.Field( - proto.MESSAGE, - number=21, - message='ProfileStatus', - ) - state: State = proto.Field( - proto.ENUM, - number=22, - enum=State, - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=5, - message=storage.SensitivityScore, - ) - data_risk_level: 'DataRiskLevel' = proto.Field( - proto.MESSAGE, - number=6, - message='DataRiskLevel', - ) - predicted_info_types: MutableSequence['InfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message='InfoTypeSummary', - ) - other_info_types: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=28, - message='OtherInfoTypeSummary', - ) - config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( - proto.MESSAGE, - number=7, - message='DataProfileConfigSnapshot', - ) - last_modified_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - expiration_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - scanned_column_count: int = proto.Field( - proto.INT64, - number=10, - ) - failed_column_count: int = proto.Field( - proto.INT64, - number=11, - ) - table_size_bytes: int = proto.Field( - proto.INT64, - number=12, - ) - row_count: int = proto.Field( - proto.INT64, - 
number=13, - ) - encryption_status: 'EncryptionStatus' = proto.Field( - proto.ENUM, - number=14, - enum='EncryptionStatus', - ) - resource_visibility: 'ResourceVisibility' = proto.Field( - proto.ENUM, - number=15, - enum='ResourceVisibility', - ) - profile_last_generated: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=16, - message=timestamp_pb2.Timestamp, - ) - resource_labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=17, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=23, - message=timestamp_pb2.Timestamp, - ) - - -class ProfileStatus(proto.Message): - r""" - - Attributes: - status (google.rpc.status_pb2.Status): - Profiling status code and optional message - timestamp (google.protobuf.timestamp_pb2.Timestamp): - Time when the profile generation status was - updated - """ - - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class InfoTypeSummary(proto.Message): - r"""The infoType details for this column. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The infoType. - estimated_prevalence (int): - Not populated for predicted infotypes. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - estimated_prevalence: int = proto.Field( - proto.INT32, - number=2, - ) - - -class OtherInfoTypeSummary(proto.Message): - r"""Infotype details for other infoTypes found within a column. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The other infoType. - estimated_prevalence (int): - Approximate percentage of non-null rows that - contained data detected by this infotype. 
- """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - estimated_prevalence: int = proto.Field( - proto.INT32, - number=2, - ) - - -class DataProfilePubSubCondition(proto.Message): - r"""A condition for determining whether a Pub/Sub should be - triggered. - - Attributes: - expressions (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions): - An expression. - """ - class ProfileScoreBucket(proto.Enum): - r"""Various score levels for resources. - - Values: - PROFILE_SCORE_BUCKET_UNSPECIFIED (0): - Unused. - HIGH (1): - High risk/sensitivity detected. - MEDIUM_OR_HIGH (2): - Medium or high risk/sensitivity detected. - """ - PROFILE_SCORE_BUCKET_UNSPECIFIED = 0 - HIGH = 1 - MEDIUM_OR_HIGH = 2 - - class PubSubCondition(proto.Message): - r"""A condition consisting of a value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - minimum_risk_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): - The minimum data risk score that triggers the - condition. - - This field is a member of `oneof`_ ``value``. - minimum_sensitivity_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): - The minimum sensitivity level that triggers - the condition. - - This field is a member of `oneof`_ ``value``. 
- """ - - minimum_risk_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( - proto.ENUM, - number=1, - oneof='value', - enum='DataProfilePubSubCondition.ProfileScoreBucket', - ) - minimum_sensitivity_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( - proto.ENUM, - number=2, - oneof='value', - enum='DataProfilePubSubCondition.ProfileScoreBucket', - ) - - class PubSubExpressions(proto.Message): - r"""An expression, consisting of an operator and conditions. - - Attributes: - logical_operator (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator): - The operator to apply to the collection of - conditions. - conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubCondition]): - Conditions to apply to the expression. - """ - class PubSubLogicalOperator(proto.Enum): - r"""Logical operators for conditional checks. - - Values: - LOGICAL_OPERATOR_UNSPECIFIED (0): - Unused. - OR (1): - Conditional OR. - AND (2): - Conditional AND. - """ - LOGICAL_OPERATOR_UNSPECIFIED = 0 - OR = 1 - AND = 2 - - logical_operator: 'DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator', - ) - conditions: MutableSequence['DataProfilePubSubCondition.PubSubCondition'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='DataProfilePubSubCondition.PubSubCondition', - ) - - expressions: PubSubExpressions = proto.Field( - proto.MESSAGE, - number=1, - message=PubSubExpressions, - ) - - -class DataProfilePubSubMessage(proto.Message): - r"""Pub/Sub topic message for a - DataProfileAction.PubSubNotification event. To receive a message - of protocol buffer schema type, convert the message data to an - object of this proto class. 
- - Attributes: - profile (google.cloud.dlp_v2.types.TableDataProfile): - If ``DetailLevel`` is ``TABLE_PROFILE`` this will be fully - populated. Otherwise, if ``DetailLevel`` is - ``RESOURCE_NAME``, then only ``name`` and ``full_resource`` - will be populated. - event (google.cloud.dlp_v2.types.DataProfileAction.EventType): - The event that caused the Pub/Sub message to - be sent. - """ - - profile: 'TableDataProfile' = proto.Field( - proto.MESSAGE, - number=1, - message='TableDataProfile', - ) - event: 'DataProfileAction.EventType' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileAction.EventType', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py deleted file mode 100644 index fdb81846..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py +++ /dev/null @@ -1,1474 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'Likelihood', - 'FileType', - 'InfoType', - 'SensitivityScore', - 'StoredType', - 'CustomInfoType', - 'FieldId', - 'PartitionId', - 'KindExpression', - 'DatastoreOptions', - 'CloudStorageRegexFileSet', - 'CloudStorageOptions', - 'CloudStorageFileSet', - 'CloudStoragePath', - 'BigQueryOptions', - 'StorageConfig', - 'HybridOptions', - 'BigQueryKey', - 'DatastoreKey', - 'Key', - 'RecordKey', - 'BigQueryTable', - 'BigQueryField', - 'EntityId', - 'TableOptions', - }, -) - - -class Likelihood(proto.Enum): - r"""Categorization of results based on how likely they are to - represent a match, based on the number of elements they contain - which imply a match. - - Values: - LIKELIHOOD_UNSPECIFIED (0): - Default value; same as POSSIBLE. - VERY_UNLIKELY (1): - Few matching elements. - UNLIKELY (2): - No description available. - POSSIBLE (3): - Some matching elements. - LIKELY (4): - No description available. - VERY_LIKELY (5): - Many matching elements. - """ - LIKELIHOOD_UNSPECIFIED = 0 - VERY_UNLIKELY = 1 - UNLIKELY = 2 - POSSIBLE = 3 - LIKELY = 4 - VERY_LIKELY = 5 - - -class FileType(proto.Enum): - r"""Definitions of file type groups to scan. New types will be - added to this list. - - Values: - FILE_TYPE_UNSPECIFIED (0): - Includes all files. - BINARY_FILE (1): - Includes all file extensions not covered by another entry. - Binary scanning attempts to convert the content of the file - to utf_8 to scan the file. If you wish to avoid this fall - back, specify one or more of the other FileType's in your - storage scan. 
- TEXT_FILE (2): - Included file extensions: - asc,asp, aspx, brf, c, cc,cfm, cgi, cpp, csv, - cxx, c++, cs, css, dart, dat, dot, eml,, - epbub, ged, go, h, hh, hpp, hxx, h++, hs, html, - htm, mkd, markdown, m, ml, mli, perl, pl, - plist, pm, php, phtml, pht, properties, py, - pyw, rb, rbw, rs, rss, rc, scala, sh, sql, - swift, tex, shtml, shtm, xhtml, lhs, ics, ini, - java, js, json, kix, kml, ocaml, md, txt, - text, tsv, vb, vcard, vcs, wml, xcodeproj, xml, - xsl, xsd, yml, yaml. - IMAGE (3): - Included file extensions: bmp, gif, jpg, jpeg, jpe, png. - bytes_limit_per_file has no effect on image files. Image - inspection is restricted to 'global', 'us', 'asia', and - 'europe'. - WORD (5): - Word files >30 MB will be scanned as binary - files. Included file extensions: - docx, dotx, docm, dotm - PDF (6): - PDF files >30 MB will be scanned as binary - files. Included file extensions: - pdf - AVRO (7): - Included file extensions: - avro - CSV (8): - Included file extensions: - csv - TSV (9): - Included file extensions: - tsv - POWERPOINT (11): - Powerpoint files >30 MB will be scanned as - binary files. Included file extensions: - pptx, pptm, potx, potm, pot - EXCEL (12): - Excel files >30 MB will be scanned as binary - files. Included file extensions: - xlsx, xlsm, xltx, xltm - """ - FILE_TYPE_UNSPECIFIED = 0 - BINARY_FILE = 1 - TEXT_FILE = 2 - IMAGE = 3 - WORD = 5 - PDF = 6 - AVRO = 7 - CSV = 8 - TSV = 9 - POWERPOINT = 11 - EXCEL = 12 - - -class InfoType(proto.Message): - r"""Type of information detected by the API. - - Attributes: - name (str): - Name of the information type. Either a name of your choosing - when creating a CustomInfoType, or one of the names listed - at https://cloud.google.com/dlp/docs/infotypes-reference - when specifying a built-in type. When sending Cloud DLP - results to Data Catalog, infoType names should conform to - the pattern ``[A-Za-z0-9$_-]{1,64}``. - version (str): - Optional version name for this InfoType. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - version: str = proto.Field( - proto.STRING, - number=2, - ) - - -class SensitivityScore(proto.Message): - r"""Score is a summary of all elements in the data profile. - A higher number means more sensitive. - - Attributes: - score (google.cloud.dlp_v2.types.SensitivityScore.SensitivityScoreLevel): - The score applied to the resource. - """ - class SensitivityScoreLevel(proto.Enum): - r"""Various score levels for resources. - - Values: - SENSITIVITY_SCORE_UNSPECIFIED (0): - Unused. - SENSITIVITY_LOW (10): - No sensitive information detected. Limited - access. - SENSITIVITY_MODERATE (20): - Medium risk - PII, potentially sensitive - data, or fields with free-text data that are at - higher risk of having intermittent sensitive - data. Consider limiting access. - SENSITIVITY_HIGH (30): - High risk – SPII may be present. Exfiltration - of data may lead to user data loss. - Re-identification of users may be possible. - Consider limiting usage and or removing SPII. - """ - SENSITIVITY_SCORE_UNSPECIFIED = 0 - SENSITIVITY_LOW = 10 - SENSITIVITY_MODERATE = 20 - SENSITIVITY_HIGH = 30 - - score: SensitivityScoreLevel = proto.Field( - proto.ENUM, - number=1, - enum=SensitivityScoreLevel, - ) - - -class StoredType(proto.Message): - r"""A reference to a StoredInfoType to use with scanning. - - Attributes: - name (str): - Resource name of the requested ``StoredInfoType``, for - example - ``organizations/433245324/storedInfoTypes/432452342`` or - ``projects/project-id/storedInfoTypes/432452342``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp indicating when the version of the - ``StoredInfoType`` used for inspection was created. - Output-only field, populated by the system. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class CustomInfoType(proto.Message): - r"""Custom information type provided by the user. Used to find - domain-specific sensitive information configurable to the data - in question. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - CustomInfoType can either be a new infoType, or an extension - of built-in infoType, when the name matches one of existing - infoTypes and that infoType is specified in - ``InspectContent.info_types`` field. Specifying the latter - adds findings to the one detected by the system. If built-in - info type is not specified in ``InspectContent.info_types`` - list then the name is treated as a custom info type. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Likelihood to return for this CustomInfoType. This base - value can be altered by a detection rule if the finding - meets the criteria specified by the rule. Defaults to - ``VERY_LIKELY`` if not specified. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - A list of phrases to detect as a - CustomInfoType. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression based CustomInfoType. - - This field is a member of `oneof`_ ``type``. - surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): - Message for detecting output from - deidentification transformations that support - reversing. - - This field is a member of `oneof`_ ``type``. 
- stored_type (google.cloud.dlp_v2.types.StoredType): - Load an existing ``StoredInfoType`` resource for use in - ``InspectDataSource``. Not currently supported in - ``InspectContent``. - - This field is a member of `oneof`_ ``type``. - detection_rules (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): - Set of detection rules to apply to all findings of this - CustomInfoType. Rules are applied in order that they are - specified. Not supported for the ``surrogate_type`` - CustomInfoType. - exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): - If set to EXCLUSION_TYPE_EXCLUDE this infoType will not - cause a finding to be returned. It still can be used for - rules matching. - """ - class ExclusionType(proto.Enum): - r""" - - Values: - EXCLUSION_TYPE_UNSPECIFIED (0): - A finding of this custom info type will not - be excluded from results. - EXCLUSION_TYPE_EXCLUDE (1): - A finding of this custom info type will be - excluded from final results, but can still - affect rule execution. - """ - EXCLUSION_TYPE_UNSPECIFIED = 0 - EXCLUSION_TYPE_EXCLUDE = 1 - - class Dictionary(proto.Message): - r"""Custom information type based on a dictionary of words or phrases. - This can be used to match sensitive information specific to the - data, such as a list of employee IDs or job titles. - - Dictionary words are case-insensitive and all characters other than - letters and digits in the unicode `Basic Multilingual - Plane `__ - will be replaced with whitespace when scanning for matches, so the - dictionary phrase "Sam Johnson" will match all three phrases "sam - johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the - characters surrounding any match must be of a different type than - the adjacent characters within the word, so letters must be next to - non-letters and digits next to non-digits. 
For example, the - dictionary word "jen" will match the first three letters of the text - "jen123" but will return no matches for "jennifer". - - Dictionary words containing a large number of characters that are - not letters or digits may result in unexpected findings because such - characters are treated as whitespace. The - `limits `__ page contains - details about the size limits of dictionaries. For dictionaries that - do not fit within these constraints, consider using - ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): - List of words or phrases to search for. - - This field is a member of `oneof`_ ``source``. - cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): - Newline-delimited file of words in Cloud - Storage. Only a single file is accepted. - - This field is a member of `oneof`_ ``source``. - """ - - class WordList(proto.Message): - r"""Message defining a list of words or phrases to search for in - the data. - - Attributes: - words (MutableSequence[str]): - Words or phrases defining the dictionary. The dictionary - must contain at least one phrase and every phrase must - contain at least 2 characters that are letters or digits. 
- [required] - """ - - words: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - word_list: 'CustomInfoType.Dictionary.WordList' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='CustomInfoType.Dictionary.WordList', - ) - cloud_storage_path: 'CloudStoragePath' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='CloudStoragePath', - ) - - class Regex(proto.Message): - r"""Message defining a custom regular expression. - - Attributes: - pattern (str): - Pattern defining the regular expression. Its - syntax - (https://github.com/google/re2/wiki/Syntax) can - be found under the google/re2 repository on - GitHub. - group_indexes (MutableSequence[int]): - The index of the submatch to extract as - findings. When not specified, the entire match - is returned. No more than 3 may be included. - """ - - pattern: str = proto.Field( - proto.STRING, - number=1, - ) - group_indexes: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=2, - ) - - class SurrogateType(proto.Message): - r"""Message for detecting output from deidentification transformations - such as - ```CryptoReplaceFfxFpeConfig`` `__. - These types of transformations are those that perform - pseudonymization, thereby producing a "surrogate" as output. This - should be used in conjunction with a field on the transformation - such as ``surrogate_info_type``. This CustomInfoType does not - support the use of ``detection_rules``. - - """ - - class DetectionRule(proto.Message): - r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a - ``CustomInfoType`` to alter behavior under certain circumstances, - depending on the specific details of the rule. Not supported for the - ``surrogate_type`` custom infoType. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - - This field is a member of `oneof`_ ``type``. - """ - - class Proximity(proto.Message): - r"""Message for specifying a window around a finding to apply a - detection rule. - - Attributes: - window_before (int): - Number of characters before the finding to consider. For - tabular data, if you want to modify the likelihood of an - entire column of findngs, set this to 1. For more - information, see [Hotword example: Set the match likelihood - of a table column] - (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). - window_after (int): - Number of characters after the finding to - consider. - """ - - window_before: int = proto.Field( - proto.INT32, - number=1, - ) - window_after: int = proto.Field( - proto.INT32, - number=2, - ) - - class LikelihoodAdjustment(proto.Message): - r"""Message for specifying an adjustment to the likelihood of a - finding as part of a detection rule. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - fixed_likelihood (google.cloud.dlp_v2.types.Likelihood): - Set the likelihood of a finding to a fixed - value. - - This field is a member of `oneof`_ ``adjustment``. - relative_likelihood (int): - Increase or decrease the likelihood by the specified number - of levels. For example, if a finding would be ``POSSIBLE`` - without the detection rule and ``relative_likelihood`` is 1, - then it is upgraded to ``LIKELY``, while a value of -1 would - downgrade it to ``UNLIKELY``. 
Likelihood may never drop - below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so - applying an adjustment of 1 followed by an adjustment of -1 - when base likelihood is ``VERY_LIKELY`` will result in a - final likelihood of ``LIKELY``. - - This field is a member of `oneof`_ ``adjustment``. - """ - - fixed_likelihood: 'Likelihood' = proto.Field( - proto.ENUM, - number=1, - oneof='adjustment', - enum='Likelihood', - ) - relative_likelihood: int = proto.Field( - proto.INT32, - number=2, - oneof='adjustment', - ) - - class HotwordRule(proto.Message): - r"""The rule that adjusts the likelihood of findings within a - certain proximity of hotwords. - - Attributes: - hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression pattern defining what - qualifies as a hotword. - proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): - Range of characters within which the entire hotword must - reside. The total length of the window cannot exceed 1000 - characters. The finding itself will be included in the - window, so that hotwords can be used to match substrings of - the finding itself. Suppose you want Cloud DLP to promote - the likelihood of the phone number regex "(\d{3}) - \\d{3}-\d{4}" if the area code is known to be the area code - of a company's office. In this case, use the hotword regex - "(xxx)", where "xxx" is the area code in question. - - For tabular data, if you want to modify the likelihood of an - entire column of findngs, see [Hotword example: Set the - match likelihood of a table column] - (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). - likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment): - Likelihood adjustment to apply to all - matching findings. 
- """ - - hotword_regex: 'CustomInfoType.Regex' = proto.Field( - proto.MESSAGE, - number=1, - message='CustomInfoType.Regex', - ) - proximity: 'CustomInfoType.DetectionRule.Proximity' = proto.Field( - proto.MESSAGE, - number=2, - message='CustomInfoType.DetectionRule.Proximity', - ) - likelihood_adjustment: 'CustomInfoType.DetectionRule.LikelihoodAdjustment' = proto.Field( - proto.MESSAGE, - number=3, - message='CustomInfoType.DetectionRule.LikelihoodAdjustment', - ) - - hotword_rule: 'CustomInfoType.DetectionRule.HotwordRule' = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='CustomInfoType.DetectionRule.HotwordRule', - ) - - info_type: 'InfoType' = proto.Field( - proto.MESSAGE, - number=1, - message='InfoType', - ) - likelihood: 'Likelihood' = proto.Field( - proto.ENUM, - number=6, - enum='Likelihood', - ) - dictionary: Dictionary = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=Dictionary, - ) - regex: Regex = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=Regex, - ) - surrogate_type: SurrogateType = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=SurrogateType, - ) - stored_type: 'StoredType' = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message='StoredType', - ) - detection_rules: MutableSequence[DetectionRule] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=DetectionRule, - ) - exclusion_type: ExclusionType = proto.Field( - proto.ENUM, - number=8, - enum=ExclusionType, - ) - - -class FieldId(proto.Message): - r"""General identifier of a data field in a storage service. - - Attributes: - name (str): - Name describing the field. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class PartitionId(proto.Message): - r"""Datastore partition ID. - A partition ID identifies a grouping of entities. The grouping - is always by project and namespace, however the namespace ID may - be empty. 
- A partition ID contains several dimensions: - project ID and namespace ID. - - Attributes: - project_id (str): - The ID of the project to which the entities - belong. - namespace_id (str): - If not empty, the ID of the namespace to - which the entities belong. - """ - - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - namespace_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class KindExpression(proto.Message): - r"""A representation of a Datastore kind. - - Attributes: - name (str): - The name of the kind. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DatastoreOptions(proto.Message): - r"""Options defining a data set within Google Cloud Datastore. - - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - A partition ID identifies a grouping of - entities. The grouping is always by project and - namespace, however the namespace ID may be - empty. - kind (google.cloud.dlp_v2.types.KindExpression): - The kind to process. - """ - - partition_id: 'PartitionId' = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - kind: 'KindExpression' = proto.Field( - proto.MESSAGE, - number=2, - message='KindExpression', - ) - - -class CloudStorageRegexFileSet(proto.Message): - r"""Message representing a set of files in a Cloud Storage bucket. - Regular expressions are used to allow fine-grained control over - which files in the bucket to include. - - Included files are those that match at least one item in - ``include_regex`` and do not match any items in ``exclude_regex``. - Note that a file that matches items from both lists will *not* be - included. For a match to occur, the entire file path (i.e., - everything in the url after the bucket name) must match the regular - expression. 
- - For example, given the input - ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: - - - ``gs://mybucket/directory1/myfile`` will be included - - ``gs://mybucket/directory1/directory2/myfile`` will be included - (``.*`` matches across ``/``) - - ``gs://mybucket/directory0/directory1/myfile`` will *not* be - included (the full path doesn't match any items in - ``include_regex``) - - ``gs://mybucket/directory1/excludedfile`` will *not* be included - (the path matches an item in ``exclude_regex``) - - If ``include_regex`` is left empty, it will match all files by - default (this is equivalent to setting ``include_regex: [".*"]``). - - Some other common use cases: - - - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will - include all files in ``mybucket`` except for .pdf files - - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` - will include all files directly under - ``gs://mybucket/directory/``, without matching across ``/`` - - Attributes: - bucket_name (str): - The name of a Cloud Storage bucket. Required. - include_regex (MutableSequence[str]): - A list of regular expressions matching file paths to - include. All files in the bucket that match at least one of - these regular expressions will be included in the set of - files, except for those that also match an item in - ``exclude_regex``. Leaving this field empty will match all - files by default (this is equivalent to including ``.*`` in - the list). - - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. - exclude_regex (MutableSequence[str]): - A list of regular expressions matching file paths to - exclude. All files in the bucket that match at least one of - these regular expressions will be excluded from the scan. - - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. 
- """ - - bucket_name: str = proto.Field( - proto.STRING, - number=1, - ) - include_regex: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - exclude_regex: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CloudStorageOptions(proto.Message): - r"""Options defining a file or a set of files within a Cloud - Storage bucket. - - Attributes: - file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): - The set of one or more files to scan. - bytes_limit_per_file (int): - Max number of bytes to scan from a file. If a scanned file's - size is bigger than this value then the rest of the bytes - are omitted. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. Cannot be set - if de-identification is requested. - bytes_limit_per_file_percent (int): - Max percentage of bytes to scan from a file. The rest are - omitted. The number of bytes scanned is rounded down. Must - be between 0 and 100, inclusively. Both 0 and 100 means no - limit. Defaults to 0. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. Cannot be set - if de-identification is requested. - file_types (MutableSequence[google.cloud.dlp_v2.types.FileType]): - List of file type groups to include in the scan. If empty, - all files are scanned and available data format processors - are applied. In addition, the binary content of the selected - files is always scanned as well. Images are scanned only as - binary if the specified region does not support image - inspection and no file_types were specified. Image - inspection is restricted to 'global', 'us', 'asia', and - 'europe'. - sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): - - files_limit_percent (int): - Limits the number of files to scan to this - percentage of the input FileSet. Number of files - scanned is rounded down. Must be between 0 and - 100, inclusively. Both 0 and 100 means no limit. 
- Defaults to 0. - """ - class SampleMethod(proto.Enum): - r"""How to sample bytes if not all bytes are scanned. Meaningful only - when used in conjunction with bytes_limit_per_file. If not - specified, scanning would start from the top. - - Values: - SAMPLE_METHOD_UNSPECIFIED (0): - No description available. - TOP (1): - Scan from the top (default). - RANDOM_START (2): - For each file larger than bytes_limit_per_file, randomly - pick the offset to start scanning. The scanned bytes are - contiguous. - """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - class FileSet(proto.Message): - r"""Set of files to scan. - - Attributes: - url (str): - The Cloud Storage url of the file(s) to scan, in the format - ``gs:///``. Trailing wildcard in the path is - allowed. - - If the url ends in a trailing slash, the bucket or directory - represented by the url will be scanned non-recursively - (content in sub-directories will not be scanned). This means - that ``gs://mybucket/`` is equivalent to - ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is - equivalent to ``gs://mybucket/directory/*``. - - Exactly one of ``url`` or ``regex_file_set`` must be set. - regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): - The regex-filtered set of files to scan. Exactly one of - ``url`` or ``regex_file_set`` must be set. 
- """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - regex_file_set: 'CloudStorageRegexFileSet' = proto.Field( - proto.MESSAGE, - number=2, - message='CloudStorageRegexFileSet', - ) - - file_set: FileSet = proto.Field( - proto.MESSAGE, - number=1, - message=FileSet, - ) - bytes_limit_per_file: int = proto.Field( - proto.INT64, - number=4, - ) - bytes_limit_per_file_percent: int = proto.Field( - proto.INT32, - number=8, - ) - file_types: MutableSequence['FileType'] = proto.RepeatedField( - proto.ENUM, - number=5, - enum='FileType', - ) - sample_method: SampleMethod = proto.Field( - proto.ENUM, - number=6, - enum=SampleMethod, - ) - files_limit_percent: int = proto.Field( - proto.INT32, - number=7, - ) - - -class CloudStorageFileSet(proto.Message): - r"""Message representing a set of files in Cloud Storage. - - Attributes: - url (str): - The url, in the format ``gs:///``. Trailing - wildcard in the path is allowed. - """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CloudStoragePath(proto.Message): - r"""Message representing a single file or path in Cloud Storage. - - Attributes: - path (str): - A url representing a file or path (no wildcards) in Cloud - Storage. Example: gs://[BUCKET_NAME]/dictionary.txt - """ - - path: str = proto.Field( - proto.STRING, - number=1, - ) - - -class BigQueryOptions(proto.Message): - r"""Options defining BigQuery table and row identifiers. - - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Table fields that may uniquely identify a row within the - table. When ``actions.saveFindings.outputConfig.table`` is - specified, the values of columns specified here are - available in the output table under - ``location.content_locations.record_location.record_key.id_values``. - Nested fields such as ``person.birthdate.year`` are allowed. 
- rows_limit (int): - Max number of rows to scan. If the table has more rows than - this value, the rest of the rows are omitted. If not set, or - if set to 0, all rows will be scanned. Only one of - rows_limit and rows_limit_percent can be specified. Cannot - be used in conjunction with TimespanConfig. - rows_limit_percent (int): - Max percentage of rows to scan. The rest are omitted. The - number of rows scanned is rounded down. Must be between 0 - and 100, inclusively. Both 0 and 100 means no limit. - Defaults to 0. Only one of rows_limit and rows_limit_percent - can be specified. Cannot be used in conjunction with - TimespanConfig. - sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): - - excluded_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - References to fields excluded from scanning. - This allows you to skip inspection of entire - columns which you know have no findings. - included_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Limit scanning only to these fields. - """ - class SampleMethod(proto.Enum): - r"""How to sample rows if not all rows are scanned. Meaningful only when - used in conjunction with either rows_limit or rows_limit_percent. If - not specified, rows are scanned in the order BigQuery reads them. - - Values: - SAMPLE_METHOD_UNSPECIFIED (0): - No description available. - TOP (1): - Scan groups of rows in the order BigQuery - provides (default). Multiple groups of rows may - be scanned in parallel, so results may not - appear in the same order the rows are read. - RANDOM_START (2): - Randomly pick groups of rows to scan. 
- """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - table_reference: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='FieldId', - ) - rows_limit: int = proto.Field( - proto.INT64, - number=3, - ) - rows_limit_percent: int = proto.Field( - proto.INT32, - number=6, - ) - sample_method: SampleMethod = proto.Field( - proto.ENUM, - number=4, - enum=SampleMethod, - ) - excluded_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldId', - ) - included_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='FieldId', - ) - - -class StorageConfig(proto.Message): - r"""Shared message indicating Cloud storage type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): - Google Cloud Datastore options. - - This field is a member of `oneof`_ ``type``. - cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): - Cloud Storage options. - - This field is a member of `oneof`_ ``type``. - big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): - BigQuery options. - - This field is a member of `oneof`_ ``type``. - hybrid_options (google.cloud.dlp_v2.types.HybridOptions): - Hybrid inspection options. - - This field is a member of `oneof`_ ``type``. - timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): - - """ - - class TimespanConfig(proto.Message): - r"""Configuration of the timespan of the items to include in - scanning. 
Currently only supported when inspecting Cloud Storage - and BigQuery. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows older than - this value. If not set, no lower time limit is - applied. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows newer than - this value. If not set, no upper time limit is - applied. - timestamp_field (google.cloud.dlp_v2.types.FieldId): - Specification of the field containing the timestamp of - scanned items. Used for data sources like Datastore and - BigQuery. - - For BigQuery - - If this value is not specified and the table was modified - between the given start and end times, the entire table will - be scanned. If this value is specified, then rows are - filtered based on the given start and end times. Rows with a - ``NULL`` value in the provided BigQuery column are skipped. - Valid data types of the provided BigQuery column are: - ``INTEGER``, ``DATE``, ``TIMESTAMP``, and ``DATETIME``. - - If your BigQuery table is `partitioned at ingestion - time `__, - you can use any of the following pseudo-columns as your - timestamp field. When used with Cloud DLP, these - pseudo-column names are case sensitive. - - .. raw:: html - -
    -
  • _PARTITIONTIME
  • -
  • _PARTITIONDATE
  • -
  • _PARTITION_LOAD_TIME
  • -
- - For Datastore - - If this value is specified, then entities are filtered based - on the given start and end times. If an entity does not - contain the provided timestamp property or contains empty or - invalid values, then it is included. Valid data types of the - provided timestamp property are: ``TIMESTAMP``. - - See the `known - issue `__ - related to this operation. - enable_auto_population_of_timespan_config (bool): - When the job is started by a JobTrigger we will - automatically figure out a valid start_time to avoid - scanning files that have not been modified since the last - time the JobTrigger executed. This will be based on the time - of the execution of the last run of the JobTrigger or the - timespan end_time used in the last run of the JobTrigger. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - timestamp_field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=3, - message='FieldId', - ) - enable_auto_population_of_timespan_config: bool = proto.Field( - proto.BOOL, - number=4, - ) - - datastore_options: 'DatastoreOptions' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreOptions', - ) - cloud_storage_options: 'CloudStorageOptions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='CloudStorageOptions', - ) - big_query_options: 'BigQueryOptions' = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message='BigQueryOptions', - ) - hybrid_options: 'HybridOptions' = proto.Field( - proto.MESSAGE, - number=9, - oneof='type', - message='HybridOptions', - ) - timespan_config: TimespanConfig = proto.Field( - proto.MESSAGE, - number=6, - message=TimespanConfig, - ) - - -class HybridOptions(proto.Message): - r"""Configuration to control jobs where the content being - inspected is outside of Google 
Cloud Platform. - - Attributes: - description (str): - A short description of where the data is - coming from. Will be stored once in the job. 256 - max length. - required_finding_label_keys (MutableSequence[str]): - These are labels that each inspection request must include - within their 'finding_labels' map. Request may contain - others, but any missing one of these will be rejected. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - No more than 10 keys can be required. - labels (MutableMapping[str, str]): - To organize findings, these labels will be added to each - finding. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional - information to make findings meaningful such as - the columns that are primary keys. - """ - - description: str = proto.Field( - proto.STRING, - number=1, - ) - required_finding_label_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - table_options: 'TableOptions' = proto.Field( - proto.MESSAGE, - number=4, - message='TableOptions', - ) - - -class BigQueryKey(proto.Message): - r"""Row key for identifying a record in BigQuery table. - - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - row_number (int): - Row number inferred at the time the table was scanned. 
This - value is nondeterministic, cannot be queried, and may be - null for inspection jobs. To locate findings within a table, - specify - ``inspect_job.storage_config.big_query_options.identifying_fields`` - in ``CreateDlpJobRequest``. - """ - - table_reference: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - row_number: int = proto.Field( - proto.INT64, - number=2, - ) - - -class DatastoreKey(proto.Message): - r"""Record key for a finding in Cloud Datastore. - - Attributes: - entity_key (google.cloud.dlp_v2.types.Key): - Datastore entity key. - """ - - entity_key: 'Key' = proto.Field( - proto.MESSAGE, - number=1, - message='Key', - ) - - -class Key(proto.Message): - r"""A unique identifier for a Datastore entity. - If a key's partition ID or any of its path kinds or names are - reserved/read-only, the key is reserved/read-only. - A reserved/read-only key is forbidden in certain documented - contexts. - - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - Entities are partitioned into subsets, - currently identified by a project ID and - namespace ID. Queries are scoped to a single - partition. - path (MutableSequence[google.cloud.dlp_v2.types.Key.PathElement]): - The entity path. An entity path consists of one or more - elements composed of a kind and a string or numerical - identifier, which identify entities. The first element - identifies a *root entity*, the second element identifies a - *child* of the root entity, the third element identifies a - child of the second entity, and so forth. The entities - identified by all prefixes of the path are called the - element's *ancestors*. - - A path can never be empty, and a path can have at most 100 - elements. - """ - - class PathElement(proto.Message): - r"""A (kind, ID/name) pair used to construct a key path. - If either name or ID is set, the element is complete. If neither - is set, the element is incomplete. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - kind (str): - The kind of the entity. A kind matching regex ``__.*__`` is - reserved/read-only. A kind must not contain more than 1500 - bytes when UTF-8 encoded. Cannot be ``""``. - id (int): - The auto-allocated ID of the entity. - Never equal to zero. Values less than zero are - discouraged and may not be supported in the - future. - - This field is a member of `oneof`_ ``id_type``. - name (str): - The name of the entity. A name matching regex ``__.*__`` is - reserved/read-only. A name must not be more than 1500 bytes - when UTF-8 encoded. Cannot be ``""``. - - This field is a member of `oneof`_ ``id_type``. - """ - - kind: str = proto.Field( - proto.STRING, - number=1, - ) - id: int = proto.Field( - proto.INT64, - number=2, - oneof='id_type', - ) - name: str = proto.Field( - proto.STRING, - number=3, - oneof='id_type', - ) - - partition_id: 'PartitionId' = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - path: MutableSequence[PathElement] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=PathElement, - ) - - -class RecordKey(proto.Message): - r"""Message for a unique key indicating a record that contains a - finding. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - datastore_key (google.cloud.dlp_v2.types.DatastoreKey): - - This field is a member of `oneof`_ ``type``. 
- big_query_key (google.cloud.dlp_v2.types.BigQueryKey): - - This field is a member of `oneof`_ ``type``. - id_values (MutableSequence[str]): - Values of identifying columns in the given row. Order of - values matches the order of ``identifying_fields`` specified - in the scanning request. - """ - - datastore_key: 'DatastoreKey' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreKey', - ) - big_query_key: 'BigQueryKey' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='BigQueryKey', - ) - id_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class BigQueryTable(proto.Message): - r"""Message defining the location of a BigQuery table. A table is - uniquely identified by its project_id, dataset_id, and table_name. - Within a query a table is often referenced with a string in the - format of: ``:.`` or - ``..``. - - Attributes: - project_id (str): - The Google Cloud Platform project ID of the - project containing the table. If omitted, - project ID is inferred from the API call. - dataset_id (str): - Dataset ID of the table. - table_id (str): - Name of the table. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=2, - ) - table_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BigQueryField(proto.Message): - r"""Message defining a field of a BigQuery table. - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Source table of the field. - field (google.cloud.dlp_v2.types.FieldId): - Designated field in the BigQuery table. - """ - - table: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=2, - message='FieldId', - ) - - -class EntityId(proto.Message): - r"""An entity in a dataset is a field or set of fields that correspond - to a single person. 
For example, in medical records the ``EntityId`` - might be a patient identifier, or for financial records it might be - an account identifier. This message is used when generalizations or - analysis must take into account that multiple rows correspond to the - same entity. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Composite key indicating which field contains - the entity identifier. - """ - - field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -class TableOptions(proto.Message): - r"""Instructions regarding the table content being inspected. - - Attributes: - identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - The columns that are the primary keys for - table objects included in ContentItem. A copy of - this cell's value will stored alongside - alongside each finding so that the finding can - be traced to the specific row it came from. No - more than 3 may be provided. - """ - - identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py deleted file mode 100644 index 6b1462df..00000000 --- a/owl-bot-staging/v2/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/dlp_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py deleted file mode 100644 index e4371abf..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ActivateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ActivateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.activate_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ActivateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py deleted file mode 100644 index c0b4fac1..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ActivateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ActivateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.activate_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ActivateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py deleted file mode 100644 index d8190299..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CancelDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_CancelDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py deleted file mode 100644 index 7475d6fa..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CancelDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_CancelDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py deleted file mode 100644 index 81ad2519..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for CreateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py deleted file mode 100644 index b394f634..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py deleted file mode 100644 index 28770717..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py deleted file mode 100644 index 779754f6..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py deleted file mode 100644 index aeb40676..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py deleted file mode 100644 index 0e344b36..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py deleted file mode 100644 index 3e82b8f3..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = await client.create_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py deleted file mode 100644 index ebb74284..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file 
except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = client.create_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py deleted file mode 100644 index cae6db89..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py deleted file mode 100644 index d59a301d..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py deleted file mode 100644 index 4903b032..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeidentifyContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = await client.deidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_DeidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py deleted file mode 100644 index 2422616c..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeidentifyContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = client.deidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_DeidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py deleted file mode 100644 index f544f12d..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_deidentify_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py deleted file mode 100644 index a33f3b26..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_deidentify_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py deleted file mode 100644 index 8737125b..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py deleted file mode 100644 index bb0ce9df..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for DeleteDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - client.delete_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py deleted file mode 100644 index f0aec8eb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_inspect_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py deleted file mode 100644 index c908d867..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_inspect_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py deleted file mode 100644 index 3784ee3e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_trigger(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py deleted file mode 100644 index 9f4405da..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - client.delete_job_trigger(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py deleted file mode 100644 index 652d88ff..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - await client.delete_stored_info_type(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py deleted file mode 100644 index 7e37ce36..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - client.delete_stored_info_type(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py deleted file mode 100644 index 869504da..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinishDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_FinishDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.finish_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_FinishDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py deleted file mode 100644 index 1b694f90..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinishDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_FinishDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - client.finish_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_FinishDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py deleted file mode 100644 index fc1570d3..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for GetDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py deleted file mode 100644 index bb1e1986..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py deleted file mode 100644 index 2065aa85..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py deleted file mode 100644 index 13959bde..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. 
DO NOT EDIT! -# -# Snippet for GetDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py deleted file mode 100644 index 1a9c9649..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py deleted file mode 100644 index 112e3d83..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py deleted file mode 100644 index 248184c7..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py deleted file mode 100644 index 9c6cdb3a..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py deleted file mode 100644 index a7820fe2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py deleted file mode 100644 index d0b0a44c..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py deleted file mode 100644 index e9f9be5a..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py deleted file mode 100644 index 2bfd7fe1..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py deleted file mode 100644 index dbdd91c2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py deleted file mode 100644 index a9c4c85e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the 
License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py deleted file mode 100644 index 3f24588b..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for InspectContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_InspectContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = await client.inspect_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_InspectContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py deleted file mode 100644 index 4b5a10f3..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for InspectContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_InspectContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = client.inspect_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_InspectContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py deleted file mode 100644 index d1a40dc0..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeidentifyTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py deleted file mode 100644 index 6a01f0fb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeidentifyTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py deleted file mode 100644 index 57c790d8..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDlpJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDlpJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDlpJobs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py deleted file mode 100644 index 7d06c237..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDlpJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDlpJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDlpJobs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py deleted file mode 100644 index 16b871f8..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInfoTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = await client.list_info_types(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ListInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py deleted file mode 100644 index 9e3ca167..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInfoTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = client.list_info_types(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ListInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py deleted file mode 100644 index 6e405a4f..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInspectTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInspectTemplates_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListInspectTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py deleted file mode 100644 index 71673677..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInspectTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInspectTemplates_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListInspectTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py deleted file mode 100644 index e8c0281f..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListJobTriggers_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListJobTriggers_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py deleted file mode 100644 index 0f9141c0..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListJobTriggers_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListJobTriggers_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py deleted file mode 100644 index 460c99c4..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListStoredInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py deleted file mode 100644 index 1ad1796e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListStoredInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py deleted file mode 100644 index a7a0d502..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RedactImage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_RedactImage_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = await client.redact_image(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_RedactImage_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py deleted file mode 100644 index 272bdb80..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RedactImage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_RedactImage_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = client.redact_image(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_RedactImage_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py deleted file mode 100644 index 401f62df..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ReidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ReidentifyContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.reidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ReidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py deleted file mode 100644 index 9e654be9..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ReidentifyContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = client.reidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ReidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py deleted file mode 100644 index 8b32186c..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py deleted file mode 100644 index e3296531..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py deleted file mode 100644 index 8e062116..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py deleted file mode 100644 index 332c5de6..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py deleted file mode 100644 index 58baaeeb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.update_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py deleted file mode 100644 index 3694b5ff..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.update_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py deleted file mode 100644 index d5658d32..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.update_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py deleted file mode 100644 index 9471180b..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.update_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json deleted file mode 100644 index 956f9eab..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ /dev/null @@ -1,5503 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.privacy.dlp.v2", - "version": "v2" - } - ], - "language": "PYTHON", - "name": "google-cloud-dlp", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.activate_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ActivateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "activate_job_trigger" - }, - "description": "Sample for ActivateJobTrigger", - "file": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.activate_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ActivateJobTrigger" - }, - "parameters": [ - { 
- "name": "request", - "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "activate_job_trigger" - }, - "description": "Sample for ActivateJobTrigger", - "file": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.cancel_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CancelDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_dlp_job" - }, - "description": "Sample for CancelDlpJob", - "file": 
"dlp_v2_generated_dlp_service_cancel_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.cancel_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CancelDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_dlp_job" - }, - "description": "Sample for CancelDlpJob", - "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "create_deidentify_template" - }, - "description": "Sample for CreateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - 
"shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "create_deidentify_template" - }, - "description": "Sample for CreateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", - 
"service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_job", - "type": "google.cloud.dlp_v2.types.InspectJobConfig" - }, - { - "name": "risk_job", - "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "create_dlp_job" - }, - "description": "Sample for CreateDlpJob", - "file": "dlp_v2_generated_dlp_service_create_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" - }, - { - "name": 
"parent", - "type": "str" - }, - { - "name": "inspect_job", - "type": "google.cloud.dlp_v2.types.InspectJobConfig" - }, - { - "name": "risk_job", - "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "create_dlp_job" - }, - "description": "Sample for CreateDlpJob", - "file": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - 
}, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "create_inspect_template" - }, - "description": "Sample for CreateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_create_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "create_inspect_template" - }, - "description": 
"Sample for CreateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "create_job_trigger" - }, - "description": "Sample for CreateJobTrigger", - "file": "dlp_v2_generated_dlp_service_create_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_async", - "segments": [ - { - "end": 55, - 
"start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "create_job_trigger" - }, - "description": "Sample for CreateJobTrigger", - "file": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 
56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "create_stored_info_type" - }, - "description": "Sample for CreateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "create_stored_info_type" - }, - "description": "Sample for CreateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.deidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", 
- "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", - "shortName": "deidentify_content" - }, - "description": "Sample for DeidentifyContent", - "file": "dlp_v2_generated_dlp_service_deidentify_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_deidentify_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.deidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", - "shortName": "deidentify_content" - }, - "description": "Sample for DeidentifyContent", - "file": "dlp_v2_generated_dlp_service_deidentify_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_deidentify_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deidentify_template" - }, - "description": "Sample for DeleteDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deidentify_template" - }, - "description": "Sample for DeleteDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dlp_job" - }, - "description": "Sample for DeleteDlpJob", - "file": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - 
"shortName": "DlpService" - }, - "shortName": "DeleteDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dlp_job" - }, - "description": "Sample for DeleteDlpJob", - "file": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_inspect_template" - }, - 
"description": "Sample for DeleteInspectTemplate", - "file": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_inspect_template" - }, - "description": "Sample for DeleteInspectTemplate", - "file": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job_trigger" - }, - "description": "Sample for DeleteJobTrigger", - "file": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - 
"fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job_trigger" - }, - "description": "Sample for DeleteJobTrigger", - "file": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" - }, - { - "name": "name", - "type": 
"str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_stored_info_type" - }, - "description": "Sample for DeleteStoredInfoType", - "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_stored_info_type" - }, - "description": "Sample for DeleteStoredInfoType", - "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dlp_v2_generated_DlpService_DeleteStoredInfoType_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.finish_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "FinishDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "finish_dlp_job" - }, - "description": "Sample for FinishDlpJob", - "file": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.finish_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "FinishDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "finish_dlp_job" - }, - "description": "Sample for FinishDlpJob", - "file": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "get_deidentify_template" - }, - "description": "Sample for GetDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "get_deidentify_template" - }, - "description": "Sample for GetDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "get_dlp_job" - }, - "description": "Sample for GetDlpJob", - "file": "dlp_v2_generated_dlp_service_get_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - 
{ - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "get_dlp_job" - }, - "description": "Sample for GetDlpJob", - "file": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - 
"client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "get_inspect_template" - }, - "description": "Sample for GetInspectTemplate", - "file": "dlp_v2_generated_dlp_service_get_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - 
}, - "shortName": "GetInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "get_inspect_template" - }, - "description": "Sample for GetInspectTemplate", - "file": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "get_job_trigger" - }, - "description": "Sample for GetJobTrigger", - "file": "dlp_v2_generated_dlp_service_get_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "get_job_trigger" - }, - "description": "Sample for GetJobTrigger", - "file": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": 
"FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "get_stored_info_type" - }, - "description": "Sample for GetStoredInfoType", - "file": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "get_stored_info_type" - }, - "description": "Sample for GetStoredInfoType", - "file": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_dlp_job", - "method": { - "fullName": 
"google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_dlp_job" - }, - "description": "Sample for HybridInspectDlpJob", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - 
{ - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_dlp_job" - }, - "description": "Sample for HybridInspectDlpJob", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_job_trigger" - }, - 
"description": "Sample for HybridInspectJobTrigger", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_job_trigger" - }, - "description": "Sample for HybridInspectJobTrigger", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": 
"FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.inspect_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "InspectContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.InspectContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", - "shortName": "inspect_content" - }, - "description": "Sample for InspectContent", - "file": "dlp_v2_generated_dlp_service_inspect_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_InspectContent_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dlp_v2_generated_dlp_service_inspect_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.inspect_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "InspectContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.InspectContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", - "shortName": "inspect_content" - }, - "description": "Sample for InspectContent", - "file": "dlp_v2_generated_dlp_service_inspect_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_InspectContent_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_inspect_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_deidentify_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", - "service": { - "fullName": 
"google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDeidentifyTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager", - "shortName": "list_deidentify_templates" - }, - "description": "Sample for ListDeidentifyTemplates", - "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_deidentify_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDeidentifyTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager", - "shortName": "list_deidentify_templates" - }, - "description": "Sample for ListDeidentifyTemplates", - "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_dlp_jobs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDlpJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager", - "shortName": "list_dlp_jobs" - }, - 
"description": "Sample for ListDlpJobs", - "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_dlp_jobs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDlpJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager", - "shortName": "list_dlp_jobs" - }, - "description": "Sample for ListDlpJobs", - "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", - "shortName": "list_info_types" - }, - "description": "Sample for ListInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_info_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_info_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - 
"fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", - "shortName": "list_info_types" - }, - "description": "Sample for ListInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_info_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_info_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_inspect_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": 
"ListInspectTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager", - "shortName": "list_inspect_templates" - }, - "description": "Sample for ListInspectTemplates", - "file": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_inspect_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInspectTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager", - "shortName": "list_inspect_templates" - }, - "description": "Sample for ListInspectTemplates", - "file": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_job_triggers", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListJobTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager", - "shortName": "list_job_triggers" - }, - "description": "Sample for ListJobTriggers", - "file": "dlp_v2_generated_dlp_service_list_job_triggers_async.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_job_triggers_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_job_triggers", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListJobTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager", - "shortName": "list_job_triggers" - }, - "description": "Sample for ListJobTriggers", - "file": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_stored_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListStoredInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager", - "shortName": "list_stored_info_types" - }, - "description": "Sample for ListStoredInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_stored_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListStoredInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager", - "shortName": "list_stored_info_types" - }, - "description": "Sample for ListStoredInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.redact_image", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", - "service": { - "fullName": 
"google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "RedactImage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.RedactImageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", - "shortName": "redact_image" - }, - "description": "Sample for RedactImage", - "file": "dlp_v2_generated_dlp_service_redact_image_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_RedactImage_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_redact_image_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.redact_image", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "RedactImage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.RedactImageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", - "shortName": 
"redact_image" - }, - "description": "Sample for RedactImage", - "file": "dlp_v2_generated_dlp_service_redact_image_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_RedactImage_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_redact_image_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.reidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ReidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", - "shortName": "reidentify_content" - }, - "description": "Sample for ReidentifyContent", - "file": "dlp_v2_generated_dlp_service_reidentify_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_reidentify_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.reidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ReidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", - "shortName": "reidentify_content" - }, - "description": "Sample for ReidentifyContent", - "file": "dlp_v2_generated_dlp_service_reidentify_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_reidentify_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "update_deidentify_template" - }, - "description": "Sample for UpdateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.update_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "update_deidentify_template" - }, - "description": "Sample for UpdateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_inspect_template", - "method": { - "fullName": 
"google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "update_inspect_template" - }, - "description": "Sample for UpdateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_update_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - 
"shortName": "UpdateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "update_inspect_template" - }, - "description": "Sample for UpdateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" - }, 
- { - "name": "name", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "update_job_trigger" - }, - "description": "Sample for UpdateJobTrigger", - "file": "dlp_v2_generated_dlp_service_update_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "update_job_trigger" - }, - "description": "Sample for UpdateJobTrigger", - "file": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "update_stored_info_type" - }, - "description": "Sample for UpdateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "update_stored_info_type" - }, - "description": "Sample for UpdateStoredInfoType", - "file": 
"dlp_v2_generated_dlp_service_update_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py" - } - ] -} diff --git a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py deleted file mode 100644 index 9adcd0d5..00000000 --- a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py +++ /dev/null @@ -1,209 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class dlpCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'activate_job_trigger': ('name', ), - 'cancel_dlp_job': ('name', ), - 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), - 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), - 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), - 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), - 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), - 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), - 'delete_deidentify_template': ('name', ), - 'delete_dlp_job': ('name', ), - 'delete_inspect_template': ('name', ), - 'delete_job_trigger': ('name', ), - 'delete_stored_info_type': ('name', ), - 'finish_dlp_job': ('name', ), - 'get_deidentify_template': ('name', ), - 'get_dlp_job': ('name', ), - 'get_inspect_template': ('name', ), - 'get_job_trigger': ('name', ), - 'get_stored_info_type': ('name', ), - 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), - 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), - 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), - 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 
'location_id', ), - 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), - 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), - 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'type_', 'location_id', ), - 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), - 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), - 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), - 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), - 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), - 'update_stored_info_type': ('name', 'config', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=dlpCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the dlp client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py deleted file mode 100644 index 2b4eb21b..00000000 --- a/owl-bot-staging/v2/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-dlp' - - -description = "Google Cloud Dlp API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/dlp/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-dlp" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: 
Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v2/testing/constraints-3.10.txt b/owl-bot-staging/v2/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.11.txt b/owl-bot-staging/v2/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.12.txt b/owl-bot-staging/v2/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adfe..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py deleted file mode 100644 index 64618efd..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ /dev/null @@ -1,17404 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient -from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.services.dlp_service import transports -from google.cloud.dlp_v2.types import dlp -from google.cloud.dlp_v2.types import storage -from google.cloud.location import locations_pb2 -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import 
duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DlpServiceClient._get_default_mtls_endpoint(None) is None - assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DlpServiceClient, "grpc"), - (DlpServiceAsyncClient, "grpc_asyncio"), - (DlpServiceClient, "rest"), -]) -def test_dlp_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with 
mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dlp.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DlpServiceGrpcTransport, "grpc"), - (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DlpServiceRestTransport, "rest"), -]) -def test_dlp_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DlpServiceClient, "grpc"), - (DlpServiceAsyncClient, "grpc_asyncio"), - (DlpServiceClient, "rest"), -]) -def test_dlp_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dlp.googleapis.com' - ) - - -def test_dlp_service_client_get_transport_class(): - transport = DlpServiceClient.get_transport_class() - available_transports = [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceRestTransport, - ] - assert transport in available_transports - - transport = DlpServiceClient.get_transport_class("grpc") - assert transport == transports.DlpServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -def test_dlp_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), - 
(DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DlpServiceClient, DlpServiceAsyncClient -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -def test_dlp_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), -]) -def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), -]) -def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_dlp_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DlpServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_dlp_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dlp.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dlp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.InspectContentRequest, - dict, -]) -def test_inspect_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectContentResponse( - ) - response = client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -def test_inspect_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - client.inspect_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - -@pytest.mark.asyncio -async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( - )) - response = await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -@pytest.mark.asyncio -async def test_inspect_content_async_from_dict(): - await test_inspect_content_async(request_type=dict) - - -def test_inspect_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.InspectContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = dlp.InspectContentResponse() - client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_inspect_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.InspectContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) - await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.RedactImageRequest, - dict, -]) -def test_redact_image(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - ) - response = client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -def test_redact_image_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - client.redact_image() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - -@pytest.mark.asyncio -async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - )) - response = await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -@pytest.mark.asyncio -async def test_redact_image_async_from_dict(): - await test_redact_image_async(request_type=dict) - - -def test_redact_image_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = dlp.RedactImageResponse() - client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_redact_image_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) - await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.DeidentifyContentRequest, - dict, -]) -def test_deidentify_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyContentResponse( - ) - response = client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -def test_deidentify_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - client.deidentify_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - -@pytest.mark.asyncio -async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( - )) - response = await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_deidentify_content_async_from_dict(): - await test_deidentify_content_async(request_type=dict) - - -def test_deidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = dlp.DeidentifyContentResponse() - client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_deidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) - await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.ReidentifyContentRequest, - dict, -]) -def test_reidentify_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ReidentifyContentResponse( - ) - response = client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -def test_reidentify_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - client.reidentify_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - -@pytest.mark.asyncio -async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( - )) - response = await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_reidentify_content_async_from_dict(): - await test_reidentify_content_async(request_type=dict) - - -def test_reidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ReidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = dlp.ReidentifyContentResponse() - client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_reidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ReidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) - await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInfoTypesRequest, - dict, -]) -def test_list_info_types(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse( - ) - response = client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -def test_list_info_types_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - client.list_info_types() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - -@pytest.mark.asyncio -async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( - )) - response = await client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -@pytest.mark.asyncio -async def test_list_info_types_async_from_dict(): - await test_list_info_types_async(request_type=dict) - - -def test_list_info_types_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_info_types_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_info_types_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_info_types_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateInspectTemplateRequest, - dict, -]) -def test_create_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - client.create_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_inspect_template_async_from_dict(): - await test_create_inspect_template_async(request_type=dict) - - -def test_create_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - - -def test_create_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateInspectTemplateRequest, - dict, -]) -def test_update_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - client.update_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_inspect_template_async_from_dict(): - await test_update_inspect_template_async(request_type=dict) - - -def test_update_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetInspectTemplateRequest, - dict, -]) -def test_get_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - client.get_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_inspect_template_async_from_dict(): - await test_get_inspect_template_async(request_type=dict) - - -def test_get_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.GetInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInspectTemplatesRequest, - dict, -]) -def test_list_inspect_templates(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_inspect_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - client.list_inspect_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - -@pytest.mark.asyncio -async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_from_dict(): - await test_list_inspect_templates_async(request_type=dict) - - -def test_list_inspect_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = dlp.ListInspectTemplatesResponse() - client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_inspect_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_inspect_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_inspect_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.ListInspectTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_inspect_templates_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_inspect_templates(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in results) -def test_list_inspect_templates_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_inspect_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_inspect_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_inspect_templates(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteInspectTemplateRequest, - dict, -]) -def test_delete_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - client.delete_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_inspect_template_async_from_dict(): - await test_delete_inspect_template_async(request_type=dict) - - -def test_delete_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = None - client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDeidentifyTemplateRequest, - dict, -]) -def test_create_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - client.create_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_deidentify_template_async_from_dict(): - await test_create_deidentify_template_async(request_type=dict) - - -def test_create_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - - -def test_create_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDeidentifyTemplateRequest, - dict, -]) -def test_update_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - client.update_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_deidentify_template_async_from_dict(): - await test_update_deidentify_template_async(request_type=dict) - - -def test_update_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDeidentifyTemplateRequest, - dict, -]) -def test_get_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - client.get_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_deidentify_template_async_from_dict(): - await test_get_deidentify_template_async(request_type=dict) - - -def test_get_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDeidentifyTemplatesRequest, - dict, -]) -def test_list_deidentify_templates(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_deidentify_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - client.list_deidentify_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_from_dict(): - await test_list_deidentify_templates_async(request_type=dict) - - -def test_list_deidentify_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = dlp.ListDeidentifyTemplatesResponse() - client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) - await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_deidentify_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_deidentify_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_deidentify_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_deidentify_templates_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_deidentify_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_deidentify_templates_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_deidentify_templates_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_deidentify_templates(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in results) -def test_list_deidentify_templates_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_deidentify_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_deidentify_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_deidentify_templates(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDeidentifyTemplateRequest, - dict, -]) -def test_delete_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - client.delete_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async_from_dict(): - await test_delete_deidentify_template_async(request_type=dict) - - -def test_delete_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = None - client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateJobTriggerRequest, - dict, -]) -def test_create_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_create_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - client.create_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - -@pytest.mark.asyncio -async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_create_job_trigger_async_from_dict(): - await test_create_job_trigger_async(request_type=dict) - - -def test_create_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - - -def test_create_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateJobTriggerRequest, - dict, -]) -def test_update_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_update_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - client.update_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - -@pytest.mark.asyncio -async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_update_job_trigger_async_from_dict(): - await test_update_job_trigger_async(request_type=dict) - - -def test_update_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectJobTriggerRequest, - dict, -]) -def test_hybrid_inspect_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse( - ) - response = client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - client.hybrid_inspect_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - response = await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async_from_dict(): - await test_hybrid_inspect_job_trigger_async(request_type=dict) - - -def test_hybrid_inspect_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_hybrid_inspect_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.hybrid_inspect_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_hybrid_inspect_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.HybridInspectResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.hybrid_inspect_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetJobTriggerRequest, - dict, -]) -def test_get_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_get_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - client.get_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - -@pytest.mark.asyncio -async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_get_job_trigger_async_from_dict(): - await test_get_job_trigger_async(request_type=dict) - - -def test_get_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListJobTriggersRequest, - dict, -]) -def test_list_job_triggers(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - ) - response = client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_triggers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - client.list_job_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - -@pytest.mark.asyncio -async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_job_triggers_async_from_dict(): - await test_list_job_triggers_async(request_type=dict) - - -def test_list_job_triggers_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListJobTriggersRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value = dlp.ListJobTriggersResponse() - client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_job_triggers_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListJobTriggersRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) - await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_job_triggers_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.ListJobTriggersResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_job_triggers( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_job_triggers_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_job_triggers_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_job_triggers( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_job_triggers_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - - -def test_list_job_triggers_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_job_triggers(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in results) -def test_list_job_triggers_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - pages = list(client.list_job_triggers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_job_triggers_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_job_triggers(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_job_triggers_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_job_triggers(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteJobTriggerRequest, - dict, -]) -def test_delete_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - client.delete_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - -@pytest.mark.asyncio -async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_trigger_async_from_dict(): - await test_delete_job_trigger_async(request_type=dict) - - -def test_delete_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = None - client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ActivateJobTriggerRequest, - dict, -]) -def test_activate_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_activate_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - client.activate_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - -@pytest.mark.asyncio -async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_activate_job_trigger_async_from_dict(): - await test_activate_job_trigger_async(request_type=dict) - - -def test_activate_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_activate_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDlpJobRequest, - dict, -]) -def test_create_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_create_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - client.create_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - -@pytest.mark.asyncio -async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_create_dlp_job_async_from_dict(): - await test_create_dlp_job_async(request_type=dict) - - -def test_create_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_dlp_job( - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) - - -def test_create_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - -@pytest.mark.asyncio -async def test_create_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_dlp_job( - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) - -@pytest.mark.asyncio -async def test_create_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDlpJobsRequest, - dict, -]) -def test_list_dlp_jobs(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_dlp_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - client.list_dlp_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDlpJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_from_dict(): - await test_list_dlp_jobs_async(request_type=dict) - - -def test_list_dlp_jobs_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDlpJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = dlp.ListDlpJobsResponse() - client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDlpJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) - await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_dlp_jobs_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_dlp_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_dlp_jobs_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_dlp_jobs_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_dlp_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_dlp_jobs_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - - -def test_list_dlp_jobs_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_dlp_jobs(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in results) -def test_list_dlp_jobs_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - pages = list(client.list_dlp_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_dlp_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_dlp_jobs(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.GetDlpJobRequest, - dict, -]) -def test_get_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_get_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - client.get_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - -@pytest.mark.asyncio -async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_get_dlp_job_async_from_dict(): - await test_get_dlp_job_async(request_type=dict) - - -def test_get_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDlpJobRequest, - dict, -]) -def test_delete_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - client.delete_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() - -@pytest.mark.asyncio -async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_dlp_job_async_from_dict(): - await test_delete_dlp_job_async(request_type=dict) - - -def test_delete_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value = None - client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CancelDlpJobRequest, - dict, -]) -def test_cancel_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - client.cancel_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() - -@pytest.mark.asyncio -async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_dlp_job_async_from_dict(): - await test_cancel_dlp_job_async(request_type=dict) - - -def test_cancel_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CancelDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value = None - client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_cancel_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CancelDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateStoredInfoTypeRequest, - dict, -]) -def test_create_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_create_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - client.create_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_stored_info_type_async_from_dict(): - await test_create_stored_info_type_async(request_type=dict) - - -def test_create_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateStoredInfoTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateStoredInfoTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_stored_info_type( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - - -def test_create_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - -@pytest.mark.asyncio -async def test_create_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_stored_info_type( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateStoredInfoTypeRequest, - dict, -]) -def test_update_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_update_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - client.update_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_update_stored_info_type_async_from_dict(): - await test_update_stored_info_type_async(request_type=dict) - - -def test_update_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_stored_info_type( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_stored_info_type( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetStoredInfoTypeRequest, - dict, -]) -def test_get_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_get_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - client.get_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_stored_info_type_async_from_dict(): - await test_get_stored_info_type_async(request_type=dict) - - -def test_get_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListStoredInfoTypesRequest, - dict, -]) -def test_list_stored_info_types(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_stored_info_types_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - client.list_stored_info_types() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() - -@pytest.mark.asyncio -async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_from_dict(): - await test_list_stored_info_types_async(request_type=dict) - - -def test_list_stored_info_types_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListStoredInfoTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value = dlp.ListStoredInfoTypesResponse() - client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_stored_info_types_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListStoredInfoTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) - await client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_stored_info_types_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_stored_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_stored_info_types_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_stored_info_types_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_stored_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_stored_info_types_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_stored_info_types_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_stored_info_types(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in results) -def test_list_stored_info_types_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - pages = list(client.list_stored_info_types(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_stored_info_types(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_stored_info_types(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteStoredInfoTypeRequest, - dict, -]) -def test_delete_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - client.delete_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_stored_info_type_async_from_dict(): - await test_delete_stored_info_type_async(request_type=dict) - - -def test_delete_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeleteStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value = None - client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectDlpJobRequest, - dict, -]) -def test_hybrid_inspect_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse( - ) - response = client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - client.hybrid_inspect_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - response = await client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async_from_dict(): - await test_hybrid_inspect_dlp_job_async(request_type=dict) - - -def test_hybrid_inspect_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.HybridInspectDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - await client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_hybrid_inspect_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.hybrid_inspect_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_hybrid_inspect_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.hybrid_inspect_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.FinishDlpJobRequest, - dict, -]) -def test_finish_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_finish_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - client.finish_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() - -@pytest.mark.asyncio -async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_finish_dlp_job_async_from_dict(): - await test_finish_dlp_job_async(request_type=dict) - - -def test_finish_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.FinishDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value = None - client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_finish_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.FinishDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.InspectContentRequest, - dict, -]) -def test_inspect_content_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.inspect_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_inspect_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_inspect_content") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectContentResponse.to_json(dlp.InspectContentResponse()) - - request = dlp.InspectContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectContentResponse() - - client.inspect_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
pre.assert_called_once() - post.assert_called_once() - - -def test_inspect_content_rest_bad_request(transport: str = 'rest', request_type=dlp.InspectContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.inspect_content(request) - - -def test_inspect_content_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.RedactImageRequest, - dict, -]) -def test_redact_image_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.RedactImageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.redact_image(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_redact_image_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_redact_image") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.RedactImageResponse.to_json(dlp.RedactImageResponse()) - - request = dlp.RedactImageRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value 
= request, metadata - post.return_value = dlp.RedactImageResponse() - - client.redact_image(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_redact_image_rest_bad_request(transport: str = 'rest', request_type=dlp.RedactImageRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.redact_image(request) - - -def test_redact_image_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeidentifyContentRequest, - dict, -]) -def test_deidentify_content_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.deidentify_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_deidentify_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_deidentify_content") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyContentResponse.to_json(dlp.DeidentifyContentResponse()) - - request = dlp.DeidentifyContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyContentResponse() - - client.deidentify_content(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_deidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.DeidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.deidentify_content(request) - - -def test_deidentify_content_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ReidentifyContentRequest, - dict, -]) -def test_reidentify_content_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ReidentifyContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.reidentify_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -def test_reidentify_content_rest_required_fields(request_type=dlp.ReidentifyContentRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = dlp.ReidentifyContentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.reidentify_content(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_reidentify_content_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.reidentify_content._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reidentify_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), 
"request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_reidentify_content") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ReidentifyContentResponse.to_json(dlp.ReidentifyContentResponse()) - - request = dlp.ReidentifyContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ReidentifyContentResponse() - - client.reidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_reidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.ReidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.reidentify_content(request) - - -def test_reidentify_content_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInfoTypesRequest, - dict, -]) -def test_list_info_types_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInfoTypesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_info_types(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.ListInfoTypesResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_info_types_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_info_types") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListInfoTypesResponse.to_json(dlp.ListInfoTypesResponse()) - - request = dlp.ListInfoTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListInfoTypesResponse() - - client.list_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInfoTypesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_info_types(request) - - -def test_list_info_types_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInfoTypesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_info_types(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/infoTypes" % client.transport._host, args[1]) - - -def test_list_info_types_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_info_types_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateInspectTemplateRequest, - dict, -]) -def test_create_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_inspect_template_rest_required_fields(request_type=dlp.CreateInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "inspectTemplate", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_inspect_template") as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = dlp.CreateInspectTemplateRequest.pb(dlp.CreateInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - - request = dlp.CreateInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - - client.create_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_inspect_template(request) - - -def test_create_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) - - -def test_create_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -def test_create_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateInspectTemplateRequest, - dict, -]) -def test_update_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_inspect_template_rest_required_fields(request_type=dlp.UpdateInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_inspect_template") as pre: - pre.assert_not_called() - post.assert_not_called() - 
pb_message = dlp.UpdateInspectTemplateRequest.pb(dlp.UpdateInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - - request = dlp.UpdateInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - - client.update_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_inspect_template(request) - - -def test_update_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_update_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetInspectTemplateRequest, - dict, -]) -def test_get_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_inspect_template_rest_required_fields(request_type=dlp.GetInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_inspect_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - - request = dlp.GetInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - - client.get_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_inspect_template(request) - - -def test_get_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_get_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -def test_get_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInspectTemplatesRequest, - dict, -]) -def test_list_inspect_templates_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_inspect_templates(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_inspect_templates_rest_required_fields(request_type=dlp.ListInspectTemplatesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_inspect_templates(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_inspect_templates_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_inspect_templates._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_inspect_templates_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_inspect_templates") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListInspectTemplatesRequest.pb(dlp.ListInspectTemplatesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListInspectTemplatesResponse.to_json(dlp.ListInspectTemplatesResponse()) - - request = dlp.ListInspectTemplatesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListInspectTemplatesResponse() - - client.list_inspect_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_inspect_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_inspect_templates(request) - - -def test_list_inspect_templates_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_inspect_templates(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) - - -def test_list_inspect_templates_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_inspect_templates_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_inspect_templates(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in results) - - pages = list(client.list_inspect_templates(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - 
- -@pytest.mark.parametrize("request_type", [ - dlp.DeleteInspectTemplateRequest, - dict, -]) -def test_delete_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_inspect_template_rest_required_fields(request_type=dlp.DeleteInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_inspect_template") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteInspectTemplateRequest.pb(dlp.DeleteInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - 
pre.return_value = request, metadata - - client.delete_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_inspect_template(request) - - -def test_delete_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_delete_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -def test_delete_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDeidentifyTemplateRequest, - dict, -]) -def test_create_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_deidentify_template_rest_required_fields(request_type=dlp.CreateDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default 
values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "deidentifyTemplate", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_create_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateDeidentifyTemplateRequest.pb(dlp.CreateDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - - request = dlp.CreateDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - - client.create_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_deidentify_template(request) - - -def test_create_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) - - -def test_create_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -def test_create_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDeidentifyTemplateRequest, - dict, -]) -def test_update_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_deidentify_template_rest_required_fields(request_type=dlp.UpdateDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), 
- interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.UpdateDeidentifyTemplateRequest.pb(dlp.UpdateDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - - request = dlp.UpdateDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - - client.update_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_deidentify_template(request) - - -def test_update_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_update_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDeidentifyTemplateRequest, - dict, -]) -def test_get_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_deidentify_template_rest_required_fields(request_type=dlp.GetDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else 
transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetDeidentifyTemplateRequest.pb(dlp.GetDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - - request = dlp.GetDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - - client.get_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_deidentify_template(request) - - -def test_get_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_get_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_get_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDeidentifyTemplatesRequest, - dict, -]) -def test_list_deidentify_templates_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_deidentify_templates(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_deidentify_templates_rest_required_fields(request_type=dlp.ListDeidentifyTemplatesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_deidentify_templates(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_deidentify_templates_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_deidentify_templates._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deidentify_templates_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as 
transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListDeidentifyTemplatesRequest.pb(dlp.ListDeidentifyTemplatesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListDeidentifyTemplatesResponse.to_json(dlp.ListDeidentifyTemplatesResponse()) - - request = dlp.ListDeidentifyTemplatesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListDeidentifyTemplatesResponse() - - client.list_deidentify_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_deidentify_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_deidentify_templates(request) - - -def test_list_deidentify_templates_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_deidentify_templates(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) - - -def test_list_deidentify_templates_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_deidentify_templates_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_deidentify_templates(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in results) - - pages = list(client.list_deidentify_templates(request=sample_request).pages) - for page_, token in zip(pages, 
['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDeidentifyTemplateRequest, - dict, -]) -def test_delete_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_deidentify_template(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_deidentify_template_rest_required_fields(request_type=dlp.DeleteDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteDeidentifyTemplateRequest.pb(dlp.DeleteDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", 
"squid"), - ] - pre.return_value = request, metadata - - client.delete_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_deidentify_template(request) - - -def test_delete_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_delete_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_delete_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateJobTriggerRequest, - dict, -]) -def test_create_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_job_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_create_job_trigger_rest_required_fields(request_type=dlp.CreateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "jobTrigger", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) - - request = dlp.CreateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - - client.create_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_job_trigger(request) - - -def test_create_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) - - -def test_create_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -def test_create_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateJobTriggerRequest, - dict, -]) -def test_update_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_update_job_trigger_rest_required_fields(request_type=dlp.UpdateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) - - request = dlp.UpdateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - - client.update_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_job_trigger(request) - - -def test_update_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_update_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectJobTriggerRequest, - dict, -]) -def test_hybrid_inspect_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.hybrid_inspect_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_job_trigger_rest_required_fields(request_type=dlp.HybridInspectJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.hybrid_inspect_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.HybridInspectJobTriggerRequest.pb(dlp.HybridInspectJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - 
"body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) - - request = dlp.HybridInspectJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.HybridInspectResponse() - - client.hybrid_inspect_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_hybrid_inspect_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.hybrid_inspect_job_trigger(request) - - -def test_hybrid_inspect_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.HybridInspectResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.hybrid_inspect_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" % client.transport._host, args[1]) - - -def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - - -def test_hybrid_inspect_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetJobTriggerRequest, - dict, -]) -def test_get_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) - - request = dlp.GetJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - - client.get_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.GetJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_job_trigger(request) - - -def test_get_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_get_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - - -def test_get_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListJobTriggersRequest, - dict, -]) -def test_list_job_triggers_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_job_triggers(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_triggers_rest_required_fields(request_type=dlp.ListJobTriggersRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListJobTriggersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_job_triggers(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_job_triggers_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_job_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_job_triggers_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_job_triggers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": 
pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListJobTriggersResponse.to_json(dlp.ListJobTriggersResponse()) - - request = dlp.ListJobTriggersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListJobTriggersResponse() - - client.list_job_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_job_triggers_rest_bad_request(transport: str = 'rest', request_type=dlp.ListJobTriggersRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_job_triggers(request) - - -def test_list_job_triggers_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListJobTriggersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_job_triggers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) - - -def test_list_job_triggers_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - - -def test_list_job_triggers_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_job_triggers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in results) - - pages = list(client.list_job_triggers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteJobTriggerRequest, - dict, -]) -def test_delete_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_job_trigger(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_trigger_rest_required_fields(request_type=dlp.DeleteJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_job_trigger") as pre: - pre.assert_not_called() - pb_message = 
dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_job_trigger(request) - - -def test_delete_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_delete_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - - -def test_delete_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ActivateJobTriggerRequest, - dict, -]) -def test_activate_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.activate_job_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_activate_job_trigger_rest_required_fields(request_type=dlp.ActivateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.activate_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_activate_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_activate_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_activate_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) - - request = dlp.ActivateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - - client.activate_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_activate_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.ActivateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.activate_job_trigger(request) - - -def test_activate_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDlpJobRequest, - dict, -]) -def test_create_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) - - request = dlp.CreateDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - - client.create_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_dlp_job(request) - - -def test_create_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DlpJob() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) - - -def test_create_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - -def test_create_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDlpJobsRequest, - dict, -]) -def test_list_dlp_jobs_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_dlp_jobs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListDlpJobsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_dlp_jobs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_dlp_jobs_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_dlp_jobs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_dlp_jobs_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListDlpJobsResponse.to_json(dlp.ListDlpJobsResponse()) - - request = dlp.ListDlpJobsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListDlpJobsResponse() - - client.list_dlp_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_dlp_jobs_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDlpJobsRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_dlp_jobs(request) - - -def test_list_dlp_jobs_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListDlpJobsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_dlp_jobs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) - - -def test_list_dlp_jobs_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - - -def test_list_dlp_jobs_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_dlp_jobs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in results) - - pages = list(client.list_dlp_jobs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDlpJobRequest, - dict, -]) -def test_get_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) - 
jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dlp_job_rest_interceptors(null_interceptor): - 
transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) - - request = dlp.GetDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - - client.get_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_dlp_job(request) - - -def test_get_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) - - -def test_get_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - - -def test_get_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDlpJobRequest, - dict, -]) -def test_delete_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_dlp_job(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_dlp_job(request) - - -def test_delete_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) - - -def test_delete_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - - -def test_delete_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CancelDlpJobRequest, - dict, -]) -def test_cancel_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.cancel_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_cancel_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.cancel_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.CancelDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.cancel_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_cancel_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CancelDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_dlp_job(request) - - -def test_cancel_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateStoredInfoTypeRequest, - dict, -]) -def test_create_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_stored_info_type(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_create_stored_info_type_rest_required_fields(request_type=dlp.CreateStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "config", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateStoredInfoTypeRequest.pb(dlp.CreateStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - - request = dlp.CreateStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - - client.create_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_stored_info_type(request) - - -def test_create_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) - - -def test_create_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - -def test_create_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateStoredInfoTypeRequest, - dict, -]) -def test_update_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_stored_info_type(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_update_stored_info_type_rest_required_fields(request_type=dlp.UpdateStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.UpdateStoredInfoTypeRequest.pb(dlp.UpdateStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - - request = dlp.UpdateStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - - client.update_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_stored_info_type(request) - - -def test_update_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_update_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetStoredInfoTypeRequest, - dict, -]) -def test_get_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_stored_info_type(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_get_stored_info_type_rest_required_fields(request_type=dlp.GetStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - - request = dlp.GetStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - - client.get_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.GetStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_stored_info_type(request) - - -def test_get_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_get_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - - -def test_get_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListStoredInfoTypesRequest, - dict, -]) -def test_list_stored_info_types_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_stored_info_types(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_stored_info_types_rest_required_fields(request_type=dlp.ListStoredInfoTypesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListStoredInfoTypesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_stored_info_types(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_stored_info_types_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_stored_info_types._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_stored_info_types_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_stored_info_types") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, 
- "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListStoredInfoTypesResponse.to_json(dlp.ListStoredInfoTypesResponse()) - - request = dlp.ListStoredInfoTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListStoredInfoTypesResponse() - - client.list_stored_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_stored_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListStoredInfoTypesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_stored_info_types(request) - - -def test_list_stored_info_types_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListStoredInfoTypesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_stored_info_types(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) - - -def test_list_stored_info_types_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_stored_info_types_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_stored_info_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in results) - - pages = list(client.list_stored_info_types(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteStoredInfoTypeRequest, - dict, -]) -def test_delete_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_stored_info_type(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_stored_info_type_rest_required_fields(request_type=dlp.DeleteStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type") as pre: - pre.assert_not_called() - pb_message = 
dlp.DeleteStoredInfoTypeRequest.pb(dlp.DeleteStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_stored_info_type(request) - - -def test_delete_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_delete_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - - -def test_delete_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectDlpJobRequest, - dict, -]) -def test_hybrid_inspect_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.hybrid_inspect_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_dlp_job_rest_required_fields(request_type=dlp.HybridInspectDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - 
- # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.hybrid_inspect_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = 
DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) - - request = dlp.HybridInspectDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.HybridInspectResponse() - - client.hybrid_inspect_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_hybrid_inspect_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.hybrid_inspect_dlp_job(request) - - -def test_hybrid_inspect_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.hybrid_inspect_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" % client.transport._host, args[1]) - - -def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - - -def test_hybrid_inspect_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.FinishDlpJobRequest, - dict, -]) -def test_finish_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.finish_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.finish_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_finish_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_finish_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_finish_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.FinishDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.finish_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_finish_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.FinishDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.finish_dlp_job(request) - - -def test_finish_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DlpServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DlpServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - transports.DlpServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = DlpServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DlpServiceGrpcTransport, - ) - -def test_dlp_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_dlp_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'inspect_content', - 'redact_image', - 'deidentify_content', - 'reidentify_content', - 'list_info_types', - 'create_inspect_template', - 'update_inspect_template', - 'get_inspect_template', - 'list_inspect_templates', - 'delete_inspect_template', - 'create_deidentify_template', - 'update_deidentify_template', - 'get_deidentify_template', - 'list_deidentify_templates', - 'delete_deidentify_template', - 'create_job_trigger', - 'update_job_trigger', - 'hybrid_inspect_job_trigger', - 'get_job_trigger', - 'list_job_triggers', - 'delete_job_trigger', - 'activate_job_trigger', - 'create_dlp_job', - 'list_dlp_jobs', - 'get_dlp_job', - 'delete_dlp_job', - 'cancel_dlp_job', - 'create_stored_info_type', - 'update_stored_info_type', - 'get_stored_info_type', - 'list_stored_info_types', - 'delete_stored_info_type', - 'hybrid_inspect_dlp_job', - 'finish_dlp_job', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_dlp_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DlpServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - 
-def test_dlp_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DlpServiceTransport() - adc.assert_called_once() - - -def test_dlp_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DlpServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - ], -) -def test_dlp_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - transports.DlpServiceRestTransport, - ], -) -def test_dlp_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DlpServiceGrpcTransport, grpc_helpers), - (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dlp.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dlp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_dlp_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DlpServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dlp_service_host_no_port(transport_name): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dlp.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dlp_service_host_with_port(transport_name): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dlp.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dlp.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def 
test_dlp_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DlpServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DlpServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.inspect_content._session - session2 = client2.transport.inspect_content._session - assert session1 != session2 - session1 = client1.transport.redact_image._session - session2 = client2.transport.redact_image._session - assert session1 != session2 - session1 = client1.transport.deidentify_content._session - session2 = client2.transport.deidentify_content._session - assert session1 != session2 - session1 = client1.transport.reidentify_content._session - session2 = client2.transport.reidentify_content._session - assert session1 != session2 - session1 = client1.transport.list_info_types._session - session2 = client2.transport.list_info_types._session - assert session1 != session2 - session1 = client1.transport.create_inspect_template._session - session2 = client2.transport.create_inspect_template._session - assert session1 != session2 - session1 = client1.transport.update_inspect_template._session - session2 = client2.transport.update_inspect_template._session - assert session1 != session2 - session1 = client1.transport.get_inspect_template._session - session2 = client2.transport.get_inspect_template._session - assert session1 != session2 - session1 = client1.transport.list_inspect_templates._session - session2 = client2.transport.list_inspect_templates._session - assert session1 != session2 - session1 = client1.transport.delete_inspect_template._session - session2 = client2.transport.delete_inspect_template._session - assert session1 != session2 - session1 = client1.transport.create_deidentify_template._session - session2 = client2.transport.create_deidentify_template._session - assert session1 != session2 - 
session1 = client1.transport.update_deidentify_template._session - session2 = client2.transport.update_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.get_deidentify_template._session - session2 = client2.transport.get_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.list_deidentify_templates._session - session2 = client2.transport.list_deidentify_templates._session - assert session1 != session2 - session1 = client1.transport.delete_deidentify_template._session - session2 = client2.transport.delete_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.create_job_trigger._session - session2 = client2.transport.create_job_trigger._session - assert session1 != session2 - session1 = client1.transport.update_job_trigger._session - session2 = client2.transport.update_job_trigger._session - assert session1 != session2 - session1 = client1.transport.hybrid_inspect_job_trigger._session - session2 = client2.transport.hybrid_inspect_job_trigger._session - assert session1 != session2 - session1 = client1.transport.get_job_trigger._session - session2 = client2.transport.get_job_trigger._session - assert session1 != session2 - session1 = client1.transport.list_job_triggers._session - session2 = client2.transport.list_job_triggers._session - assert session1 != session2 - session1 = client1.transport.delete_job_trigger._session - session2 = client2.transport.delete_job_trigger._session - assert session1 != session2 - session1 = client1.transport.activate_job_trigger._session - session2 = client2.transport.activate_job_trigger._session - assert session1 != session2 - session1 = client1.transport.create_dlp_job._session - session2 = client2.transport.create_dlp_job._session - assert session1 != session2 - session1 = client1.transport.list_dlp_jobs._session - session2 = client2.transport.list_dlp_jobs._session - assert session1 != session2 - session1 = 
client1.transport.get_dlp_job._session - session2 = client2.transport.get_dlp_job._session - assert session1 != session2 - session1 = client1.transport.delete_dlp_job._session - session2 = client2.transport.delete_dlp_job._session - assert session1 != session2 - session1 = client1.transport.cancel_dlp_job._session - session2 = client2.transport.cancel_dlp_job._session - assert session1 != session2 - session1 = client1.transport.create_stored_info_type._session - session2 = client2.transport.create_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.update_stored_info_type._session - session2 = client2.transport.update_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.get_stored_info_type._session - session2 = client2.transport.get_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.list_stored_info_types._session - session2 = client2.transport.list_stored_info_types._session - assert session1 != session2 - session1 = client1.transport.delete_stored_info_type._session - session2 = client2.transport.delete_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.hybrid_inspect_dlp_job._session - session2 = client2.transport.hybrid_inspect_dlp_job._session - assert session1 != session2 - session1 = client1.transport.finish_dlp_job._session - session2 = client2.transport.finish_dlp_job._session - assert session1 != session2 -def test_dlp_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.DlpServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_dlp_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DlpServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - 
credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_deidentify_template_path(): - organization = "squid" - deidentify_template = "clam" - expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) - 
assert expected == actual - - -def test_parse_deidentify_template_path(): - expected = { - "organization": "whelk", - "deidentify_template": "octopus", - } - path = DlpServiceClient.deidentify_template_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_deidentify_template_path(path) - assert expected == actual - -def test_dlp_content_path(): - project = "oyster" - expected = "projects/{project}/dlpContent".format(project=project, ) - actual = DlpServiceClient.dlp_content_path(project) - assert expected == actual - - -def test_parse_dlp_content_path(): - expected = { - "project": "nudibranch", - } - path = DlpServiceClient.dlp_content_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_dlp_content_path(path) - assert expected == actual - -def test_dlp_job_path(): - project = "cuttlefish" - dlp_job = "mussel" - expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - actual = DlpServiceClient.dlp_job_path(project, dlp_job) - assert expected == actual - - -def test_parse_dlp_job_path(): - expected = { - "project": "winkle", - "dlp_job": "nautilus", - } - path = DlpServiceClient.dlp_job_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_dlp_job_path(path) - assert expected == actual - -def test_finding_path(): - project = "scallop" - location = "abalone" - finding = "squid" - expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - actual = DlpServiceClient.finding_path(project, location, finding) - assert expected == actual - - -def test_parse_finding_path(): - expected = { - "project": "clam", - "location": "whelk", - "finding": "octopus", - } - path = DlpServiceClient.finding_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_finding_path(path) - assert expected == actual - -def test_inspect_template_path(): - organization = "oyster" - inspect_template = "nudibranch" - expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - actual = DlpServiceClient.inspect_template_path(organization, inspect_template) - assert expected == actual - - -def test_parse_inspect_template_path(): - expected = { - "organization": "cuttlefish", - "inspect_template": "mussel", - } - path = DlpServiceClient.inspect_template_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_inspect_template_path(path) - assert expected == actual - -def test_job_trigger_path(): - project = "winkle" - job_trigger = "nautilus" - expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - actual = DlpServiceClient.job_trigger_path(project, job_trigger) - assert expected == actual - - -def test_parse_job_trigger_path(): - expected = { - "project": "scallop", - "job_trigger": "abalone", - } - path = DlpServiceClient.job_trigger_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_job_trigger_path(path) - assert expected == actual - -def test_stored_info_type_path(): - organization = "squid" - stored_info_type = "clam" - expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) - assert expected == actual - - -def test_parse_stored_info_type_path(): - expected = { - "organization": "whelk", - "stored_info_type": "octopus", - } - path = DlpServiceClient.stored_info_type_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_stored_info_type_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DlpServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = DlpServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = DlpServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = DlpServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DlpServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = DlpServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = DlpServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = DlpServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DlpServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = DlpServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DlpServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/tests/unit/gapic/dlp_v2/test_dlp_service.py b/tests/unit/gapic/dlp_v2/test_dlp_service.py index d7d5851c..062e0fe9 100644 --- a/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ b/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -32,6 +32,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore From 7c9e4c4eb27630888d43fe7aa141515022c12c85 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 24 Feb 2023 01:57:11 +0000 Subject: [PATCH 5/7] chore: Update gapic-generator-python to v1.8.5 PiperOrigin-RevId: 511892190 Source-Link: 
https://github.com/googleapis/googleapis/commit/a45d9c09c1287ffdf938f4e8083e791046c0b23b Source-Link: https://github.com/googleapis/googleapis-gen/commit/1907294b1d8365ea24f8c5f2e059a64124c4ed3b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTkwNzI5NGIxZDgzNjVlYTI0ZjhjNWYyZTA1OWE2NDEyNGM0ZWQzYiJ9 --- owl-bot-staging/v2/.coveragerc | 13 + owl-bot-staging/v2/.flake8 | 33 + owl-bot-staging/v2/MANIFEST.in | 2 + owl-bot-staging/v2/README.rst | 49 + owl-bot-staging/v2/docs/conf.py | 376 + .../v2/docs/dlp_v2/dlp_service.rst | 10 + owl-bot-staging/v2/docs/dlp_v2/services.rst | 6 + owl-bot-staging/v2/docs/dlp_v2/types.rst | 6 + owl-bot-staging/v2/docs/index.rst | 7 + .../v2/google/cloud/dlp/__init__.py | 395 + .../v2/google/cloud/dlp/gapic_version.py | 16 + owl-bot-staging/v2/google/cloud/dlp/py.typed | 2 + .../v2/google/cloud/dlp_v2/__init__.py | 396 + .../google/cloud/dlp_v2/gapic_metadata.json | 538 + .../v2/google/cloud/dlp_v2/gapic_version.py | 16 + .../v2/google/cloud/dlp_v2/py.typed | 2 + .../google/cloud/dlp_v2/services/__init__.py | 15 + .../dlp_v2/services/dlp_service/__init__.py | 22 + .../services/dlp_service/async_client.py | 4143 ++++ .../dlp_v2/services/dlp_service/client.py | 4269 ++++ .../dlp_v2/services/dlp_service/pagers.py | 623 + .../dlp_service/transports/__init__.py | 38 + .../services/dlp_service/transports/base.py | 752 + .../services/dlp_service/transports/grpc.py | 1262 ++ .../dlp_service/transports/grpc_asyncio.py | 1261 ++ .../services/dlp_service/transports/rest.py | 4325 ++++ .../v2/google/cloud/dlp_v2/types/__init__.py | 390 + .../v2/google/cloud/dlp_v2/types/dlp.py | 8848 ++++++++ .../v2/google/cloud/dlp_v2/types/storage.py | 1476 ++ owl-bot-staging/v2/mypy.ini | 3 + owl-bot-staging/v2/noxfile.py | 184 + ..._dlp_service_activate_job_trigger_async.py | 52 + ...d_dlp_service_activate_job_trigger_sync.py | 52 + ...erated_dlp_service_cancel_dlp_job_async.py | 50 + ...nerated_dlp_service_cancel_dlp_job_sync.py | 50 + 
...ervice_create_deidentify_template_async.py | 52 + ...service_create_deidentify_template_sync.py | 52 + ...erated_dlp_service_create_dlp_job_async.py | 52 + ...nerated_dlp_service_create_dlp_job_sync.py | 52 + ...p_service_create_inspect_template_async.py | 52 + ...lp_service_create_inspect_template_sync.py | 52 + ...ed_dlp_service_create_job_trigger_async.py | 56 + ...ted_dlp_service_create_job_trigger_sync.py | 56 + ...p_service_create_stored_info_type_async.py | 52 + ...lp_service_create_stored_info_type_sync.py | 52 + ...ed_dlp_service_deidentify_content_async.py | 51 + ...ted_dlp_service_deidentify_content_sync.py | 51 + ...ervice_delete_deidentify_template_async.py | 50 + ...service_delete_deidentify_template_sync.py | 50 + ...erated_dlp_service_delete_dlp_job_async.py | 50 + ...nerated_dlp_service_delete_dlp_job_sync.py | 50 + ...p_service_delete_inspect_template_async.py | 50 + ...lp_service_delete_inspect_template_sync.py | 50 + ...ed_dlp_service_delete_job_trigger_async.py | 50 + ...ted_dlp_service_delete_job_trigger_sync.py | 50 + ...p_service_delete_stored_info_type_async.py | 50 + ...lp_service_delete_stored_info_type_sync.py | 50 + ...erated_dlp_service_finish_dlp_job_async.py | 50 + ...nerated_dlp_service_finish_dlp_job_sync.py | 50 + ...p_service_get_deidentify_template_async.py | 52 + ...lp_service_get_deidentify_template_sync.py | 52 + ...generated_dlp_service_get_dlp_job_async.py | 52 + ..._generated_dlp_service_get_dlp_job_sync.py | 52 + ..._dlp_service_get_inspect_template_async.py | 52 + ...d_dlp_service_get_inspect_template_sync.py | 52 + ...rated_dlp_service_get_job_trigger_async.py | 52 + ...erated_dlp_service_get_job_trigger_sync.py | 52 + ..._dlp_service_get_stored_info_type_async.py | 52 + ...d_dlp_service_get_stored_info_type_sync.py | 52 + ...lp_service_hybrid_inspect_dlp_job_async.py | 52 + ...dlp_service_hybrid_inspect_dlp_job_sync.py | 52 + ...ervice_hybrid_inspect_job_trigger_async.py | 52 + 
...service_hybrid_inspect_job_trigger_sync.py | 52 + ...rated_dlp_service_inspect_content_async.py | 51 + ...erated_dlp_service_inspect_content_sync.py | 51 + ...service_list_deidentify_templates_async.py | 53 + ..._service_list_deidentify_templates_sync.py | 53 + ...nerated_dlp_service_list_dlp_jobs_async.py | 53 + ...enerated_dlp_service_list_dlp_jobs_sync.py | 53 + ...rated_dlp_service_list_info_types_async.py | 51 + ...erated_dlp_service_list_info_types_sync.py | 51 + ...lp_service_list_inspect_templates_async.py | 53 + ...dlp_service_list_inspect_templates_sync.py | 53 + ...ted_dlp_service_list_job_triggers_async.py | 53 + ...ated_dlp_service_list_job_triggers_sync.py | 53 + ...lp_service_list_stored_info_types_async.py | 53 + ...dlp_service_list_stored_info_types_sync.py | 53 + ...enerated_dlp_service_redact_image_async.py | 51 + ...generated_dlp_service_redact_image_sync.py | 51 + ...ed_dlp_service_reidentify_content_async.py | 52 + ...ted_dlp_service_reidentify_content_sync.py | 52 + ...ervice_update_deidentify_template_async.py | 52 + ...service_update_deidentify_template_sync.py | 52 + ...p_service_update_inspect_template_async.py | 52 + ...lp_service_update_inspect_template_sync.py | 52 + ...ed_dlp_service_update_job_trigger_async.py | 52 + ...ted_dlp_service_update_job_trigger_sync.py | 52 + ...p_service_update_stored_info_type_async.py | 52 + ...lp_service_update_stored_info_type_sync.py | 52 + ...nippet_metadata_google.privacy.dlp.v2.json | 5503 +++++ .../v2/scripts/fixup_dlp_v2_keywords.py | 209 + owl-bot-staging/v2/setup.py | 90 + .../v2/testing/constraints-3.10.txt | 6 + .../v2/testing/constraints-3.11.txt | 6 + .../v2/testing/constraints-3.12.txt | 6 + .../v2/testing/constraints-3.7.txt | 9 + .../v2/testing/constraints-3.8.txt | 6 + .../v2/testing/constraints-3.9.txt | 6 + owl-bot-staging/v2/tests/__init__.py | 16 + owl-bot-staging/v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + 
.../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 + .../unit/gapic/dlp_v2/test_dlp_service.py | 17404 ++++++++++++++++ 113 files changed, 56305 insertions(+) create mode 100644 owl-bot-staging/v2/.coveragerc create mode 100644 owl-bot-staging/v2/.flake8 create mode 100644 owl-bot-staging/v2/MANIFEST.in create mode 100644 owl-bot-staging/v2/README.rst create mode 100644 owl-bot-staging/v2/docs/conf.py create mode 100644 owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst create mode 100644 owl-bot-staging/v2/docs/dlp_v2/services.rst create mode 100644 owl-bot-staging/v2/docs/dlp_v2/types.rst create mode 100644 owl-bot-staging/v2/docs/index.rst create mode 100644 owl-bot-staging/v2/google/cloud/dlp/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py create mode 
100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py create mode 100644 owl-bot-staging/v2/mypy.ini create mode 100644 owl-bot-staging/v2/noxfile.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py create 
mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json create mode 100644 owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py create mode 100644 owl-bot-staging/v2/setup.py create mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v2/tests/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc new file mode 100644 index 00000000..76798ec2 --- /dev/null +++ b/owl-bot-staging/v2/.coveragerc @@ -0,0 
+1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dlp/__init__.py + google/cloud/dlp/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v2/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in new file mode 100644 index 00000000..148f6bf3 --- /dev/null +++ b/owl-bot-staging/v2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/dlp *.py +recursive-include google/cloud/dlp_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst new file mode 100644 index 00000000..cf97c2e7 --- /dev/null +++ b/owl-bot-staging/v2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Dlp API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. 
`Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Dlp API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py new file mode 100644 index 00000000..cf2f570a --- /dev/null +++ b/owl-bot-staging/v2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-dlp documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. 
+root_doc = "index" + +# General information about the project. +project = u"google-cloud-dlp" +copyright = u"2022, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. 
+# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation.
+# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dlp-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dlp.tex", + u"google-cloud-dlp Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + root_doc, + "google-cloud-dlp", + u"Google Cloud Dlp Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dlp", + u"google-cloud-dlp Documentation", + author, + "google-cloud-dlp", + "GAPIC library for Google Cloud Dlp API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst new file mode 100644 index 00000000..914da512 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst @@ -0,0 +1,10 @@ +DlpService +---------------------------- + +.. automodule:: google.cloud.dlp_v2.services.dlp_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v2/docs/dlp_v2/services.rst b/owl-bot-staging/v2/docs/dlp_v2/services.rst new file mode 100644 index 00000000..864a8c83 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Dlp v2 API +==================================== +.. 
toctree:: + :maxdepth: 2 + + dlp_service diff --git a/owl-bot-staging/v2/docs/dlp_v2/types.rst b/owl-bot-staging/v2/docs/dlp_v2/types.rst new file mode 100644 index 00000000..5470b717 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dlp v2 API +================================= + +.. automodule:: google.cloud.dlp_v2.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst new file mode 100644 index 00000000..d119451a --- /dev/null +++ b/owl-bot-staging/v2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + dlp_v2/services + dlp_v2/types diff --git a/owl-bot-staging/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/v2/google/cloud/dlp/__init__.py new file mode 100644 index 00000000..3c1a800c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/__init__.py @@ -0,0 +1,395 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.dlp import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient + +from google.cloud.dlp_v2.types.dlp import Action +from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails +from google.cloud.dlp_v2.types.dlp import BoundingBox +from google.cloud.dlp_v2.types.dlp import BucketingConfig +from google.cloud.dlp_v2.types.dlp import ByteContentItem +from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig +from google.cloud.dlp_v2.types.dlp import CharsToIgnore +from google.cloud.dlp_v2.types.dlp import Color +from google.cloud.dlp_v2.types.dlp import Container +from google.cloud.dlp_v2.types.dlp import ContentItem +from google.cloud.dlp_v2.types.dlp import ContentLocation +from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig +from google.cloud.dlp_v2.types.dlp import CryptoHashConfig +from google.cloud.dlp_v2.types.dlp import CryptoKey +from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig +from google.cloud.dlp_v2.types.dlp import DataProfileAction +from google.cloud.dlp_v2.types.dlp import DataProfileConfigSnapshot +from google.cloud.dlp_v2.types.dlp import DataProfileJobConfig +from google.cloud.dlp_v2.types.dlp import DataProfileLocation +from google.cloud.dlp_v2.types.dlp import DataProfilePubSubCondition +from 
google.cloud.dlp_v2.types.dlp import DataProfilePubSubMessage +from google.cloud.dlp_v2.types.dlp import DataRiskLevel +from google.cloud.dlp_v2.types.dlp import DateShiftConfig +from google.cloud.dlp_v2.types.dlp import DateTime +from google.cloud.dlp_v2.types.dlp import DeidentifyConfig +from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate +from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest +from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import DlpJob +from google.cloud.dlp_v2.types.dlp import DocumentLocation +from google.cloud.dlp_v2.types.dlp import Error +from google.cloud.dlp_v2.types.dlp import ExcludeByHotword +from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes +from google.cloud.dlp_v2.types.dlp import ExclusionRule +from google.cloud.dlp_v2.types.dlp import FieldTransformation +from google.cloud.dlp_v2.types.dlp import Finding +from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest +from google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig +from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest +from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import HybridContentItem +from google.cloud.dlp_v2.types.dlp import HybridFindingDetails +from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest +from google.cloud.dlp_v2.types.dlp import 
HybridInspectJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import HybridInspectResponse +from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics +from google.cloud.dlp_v2.types.dlp import ImageLocation +from google.cloud.dlp_v2.types.dlp import ImageTransformations +from google.cloud.dlp_v2.types.dlp import InfoTypeCategory +from google.cloud.dlp_v2.types.dlp import InfoTypeDescription +from google.cloud.dlp_v2.types.dlp import InfoTypeStats +from google.cloud.dlp_v2.types.dlp import InfoTypeSummary +from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations +from google.cloud.dlp_v2.types.dlp import InspectConfig +from google.cloud.dlp_v2.types.dlp import InspectContentRequest +from google.cloud.dlp_v2.types.dlp import InspectContentResponse +from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails +from google.cloud.dlp_v2.types.dlp import InspectionRule +from google.cloud.dlp_v2.types.dlp import InspectionRuleSet +from google.cloud.dlp_v2.types.dlp import InspectJobConfig +from google.cloud.dlp_v2.types.dlp import InspectResult +from google.cloud.dlp_v2.types.dlp import InspectTemplate +from google.cloud.dlp_v2.types.dlp import JobTrigger +from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse +from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest +from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse +from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse +from google.cloud.dlp_v2.types.dlp import 
ListJobTriggersRequest +from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import Location +from google.cloud.dlp_v2.types.dlp import Manual +from google.cloud.dlp_v2.types.dlp import MetadataLocation +from google.cloud.dlp_v2.types.dlp import OtherInfoTypeSummary +from google.cloud.dlp_v2.types.dlp import OutputStorageConfig +from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation +from google.cloud.dlp_v2.types.dlp import PrivacyMetric +from google.cloud.dlp_v2.types.dlp import ProfileStatus +from google.cloud.dlp_v2.types.dlp import QuasiId +from google.cloud.dlp_v2.types.dlp import QuoteInfo +from google.cloud.dlp_v2.types.dlp import Range +from google.cloud.dlp_v2.types.dlp import RecordCondition +from google.cloud.dlp_v2.types.dlp import RecordLocation +from google.cloud.dlp_v2.types.dlp import RecordSuppression +from google.cloud.dlp_v2.types.dlp import RecordTransformation +from google.cloud.dlp_v2.types.dlp import RecordTransformations +from google.cloud.dlp_v2.types.dlp import RedactConfig +from google.cloud.dlp_v2.types.dlp import RedactImageRequest +from google.cloud.dlp_v2.types.dlp import RedactImageResponse +from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import ReplaceDictionaryConfig +from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig +from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig +from google.cloud.dlp_v2.types.dlp import Schedule +from google.cloud.dlp_v2.types.dlp import StatisticalTable +from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel +from google.cloud.dlp_v2.types.dlp import StoredInfoType +from 
google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion +from google.cloud.dlp_v2.types.dlp import Table +from google.cloud.dlp_v2.types.dlp import TableDataProfile +from google.cloud.dlp_v2.types.dlp import TableLocation +from google.cloud.dlp_v2.types.dlp import TimePartConfig +from google.cloud.dlp_v2.types.dlp import TransformationConfig +from google.cloud.dlp_v2.types.dlp import TransformationDescription +from google.cloud.dlp_v2.types.dlp import TransformationDetails +from google.cloud.dlp_v2.types.dlp import TransformationDetailsStorageConfig +from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling +from google.cloud.dlp_v2.types.dlp import TransformationLocation +from google.cloud.dlp_v2.types.dlp import TransformationOverview +from google.cloud.dlp_v2.types.dlp import TransformationResultStatus +from google.cloud.dlp_v2.types.dlp import TransformationSummary +from google.cloud.dlp_v2.types.dlp import TransientCryptoKey +from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import Value +from google.cloud.dlp_v2.types.dlp import ValueFrequency +from google.cloud.dlp_v2.types.dlp import VersionDescription +from google.cloud.dlp_v2.types.dlp import ContentOption +from google.cloud.dlp_v2.types.dlp import DlpJobType +from google.cloud.dlp_v2.types.dlp import EncryptionStatus +from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy +from google.cloud.dlp_v2.types.dlp import MatchingType +from google.cloud.dlp_v2.types.dlp import MetadataType +from google.cloud.dlp_v2.types.dlp import 
RelationalOperator +from google.cloud.dlp_v2.types.dlp import ResourceVisibility +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState +from google.cloud.dlp_v2.types.dlp import TransformationContainerType +from google.cloud.dlp_v2.types.dlp import TransformationResultStatusType +from google.cloud.dlp_v2.types.dlp import TransformationType +from google.cloud.dlp_v2.types.storage import BigQueryField +from google.cloud.dlp_v2.types.storage import BigQueryKey +from google.cloud.dlp_v2.types.storage import BigQueryOptions +from google.cloud.dlp_v2.types.storage import BigQueryTable +from google.cloud.dlp_v2.types.storage import CloudStorageFileSet +from google.cloud.dlp_v2.types.storage import CloudStorageOptions +from google.cloud.dlp_v2.types.storage import CloudStoragePath +from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet +from google.cloud.dlp_v2.types.storage import CustomInfoType +from google.cloud.dlp_v2.types.storage import DatastoreKey +from google.cloud.dlp_v2.types.storage import DatastoreOptions +from google.cloud.dlp_v2.types.storage import EntityId +from google.cloud.dlp_v2.types.storage import FieldId +from google.cloud.dlp_v2.types.storage import HybridOptions +from google.cloud.dlp_v2.types.storage import InfoType +from google.cloud.dlp_v2.types.storage import Key +from google.cloud.dlp_v2.types.storage import KindExpression +from google.cloud.dlp_v2.types.storage import PartitionId +from google.cloud.dlp_v2.types.storage import RecordKey +from google.cloud.dlp_v2.types.storage import SensitivityScore +from google.cloud.dlp_v2.types.storage import StorageConfig +from google.cloud.dlp_v2.types.storage import StoredType +from google.cloud.dlp_v2.types.storage import TableOptions +from google.cloud.dlp_v2.types.storage import FileType +from google.cloud.dlp_v2.types.storage import Likelihood + +__all__ = ('DlpServiceClient', + 'DlpServiceAsyncClient', + 'Action', + 'ActivateJobTriggerRequest', + 
'AnalyzeDataSourceRiskDetails', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'Color', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateDeidentifyTemplateRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DataProfileAction', + 'DataProfileConfigSnapshot', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + 'DataRiskLevel', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyTemplate', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDlpJobRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeByHotword', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetDeidentifyTemplateRequest', + 'GetDlpJobRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetStoredInfoTypeRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'ImageTransformations', + 'InfoTypeCategory', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeSummary', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListDeidentifyTemplatesRequest', + 
'ListDeidentifyTemplatesResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OtherInfoTypeSummary', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'ProfileStatus', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformation', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'ReplaceDictionaryConfig', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableDataProfile', + 'TableLocation', + 'TimePartConfig', + 'TransformationConfig', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationDetailsStorageConfig', + 'TransformationErrorHandling', + 'TransformationLocation', + 'TransformationOverview', + 'TransformationResultStatus', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateDeidentifyTemplateRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 'VersionDescription', + 'ContentOption', + 'DlpJobType', + 'EncryptionStatus', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'RelationalOperator', + 'ResourceVisibility', + 'StoredInfoTypeState', + 'TransformationContainerType', + 'TransformationResultStatusType', + 'TransformationType', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 
'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'SensitivityScore', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp/py.typed b/owl-bot-staging/v2/google/cloud/dlp/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. 
diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py new file mode 100644 index 00000000..8397a3ad --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dlp_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.dlp_service import DlpServiceClient +from .services.dlp_service import DlpServiceAsyncClient + +from .types.dlp import Action +from .types.dlp import ActivateJobTriggerRequest +from .types.dlp import AnalyzeDataSourceRiskDetails +from .types.dlp import BoundingBox +from .types.dlp import BucketingConfig +from .types.dlp import ByteContentItem +from .types.dlp import CancelDlpJobRequest +from .types.dlp import CharacterMaskConfig +from .types.dlp import CharsToIgnore +from .types.dlp import Color +from .types.dlp import Container +from .types.dlp import ContentItem +from .types.dlp import ContentLocation +from .types.dlp import CreateDeidentifyTemplateRequest +from .types.dlp import CreateDlpJobRequest +from .types.dlp import CreateInspectTemplateRequest +from .types.dlp import CreateJobTriggerRequest +from .types.dlp import CreateStoredInfoTypeRequest +from .types.dlp import CryptoDeterministicConfig +from .types.dlp import CryptoHashConfig +from .types.dlp import CryptoKey +from .types.dlp 
import CryptoReplaceFfxFpeConfig +from .types.dlp import DataProfileAction +from .types.dlp import DataProfileConfigSnapshot +from .types.dlp import DataProfileJobConfig +from .types.dlp import DataProfileLocation +from .types.dlp import DataProfilePubSubCondition +from .types.dlp import DataProfilePubSubMessage +from .types.dlp import DataRiskLevel +from .types.dlp import DateShiftConfig +from .types.dlp import DateTime +from .types.dlp import DeidentifyConfig +from .types.dlp import DeidentifyContentRequest +from .types.dlp import DeidentifyContentResponse +from .types.dlp import DeidentifyTemplate +from .types.dlp import DeleteDeidentifyTemplateRequest +from .types.dlp import DeleteDlpJobRequest +from .types.dlp import DeleteInspectTemplateRequest +from .types.dlp import DeleteJobTriggerRequest +from .types.dlp import DeleteStoredInfoTypeRequest +from .types.dlp import DlpJob +from .types.dlp import DocumentLocation +from .types.dlp import Error +from .types.dlp import ExcludeByHotword +from .types.dlp import ExcludeInfoTypes +from .types.dlp import ExclusionRule +from .types.dlp import FieldTransformation +from .types.dlp import Finding +from .types.dlp import FinishDlpJobRequest +from .types.dlp import FixedSizeBucketingConfig +from .types.dlp import GetDeidentifyTemplateRequest +from .types.dlp import GetDlpJobRequest +from .types.dlp import GetInspectTemplateRequest +from .types.dlp import GetJobTriggerRequest +from .types.dlp import GetStoredInfoTypeRequest +from .types.dlp import HybridContentItem +from .types.dlp import HybridFindingDetails +from .types.dlp import HybridInspectDlpJobRequest +from .types.dlp import HybridInspectJobTriggerRequest +from .types.dlp import HybridInspectResponse +from .types.dlp import HybridInspectStatistics +from .types.dlp import ImageLocation +from .types.dlp import ImageTransformations +from .types.dlp import InfoTypeCategory +from .types.dlp import InfoTypeDescription +from .types.dlp import InfoTypeStats +from .types.dlp 
import InfoTypeSummary +from .types.dlp import InfoTypeTransformations +from .types.dlp import InspectConfig +from .types.dlp import InspectContentRequest +from .types.dlp import InspectContentResponse +from .types.dlp import InspectDataSourceDetails +from .types.dlp import InspectionRule +from .types.dlp import InspectionRuleSet +from .types.dlp import InspectJobConfig +from .types.dlp import InspectResult +from .types.dlp import InspectTemplate +from .types.dlp import JobTrigger +from .types.dlp import KmsWrappedCryptoKey +from .types.dlp import LargeCustomDictionaryConfig +from .types.dlp import LargeCustomDictionaryStats +from .types.dlp import ListDeidentifyTemplatesRequest +from .types.dlp import ListDeidentifyTemplatesResponse +from .types.dlp import ListDlpJobsRequest +from .types.dlp import ListDlpJobsResponse +from .types.dlp import ListInfoTypesRequest +from .types.dlp import ListInfoTypesResponse +from .types.dlp import ListInspectTemplatesRequest +from .types.dlp import ListInspectTemplatesResponse +from .types.dlp import ListJobTriggersRequest +from .types.dlp import ListJobTriggersResponse +from .types.dlp import ListStoredInfoTypesRequest +from .types.dlp import ListStoredInfoTypesResponse +from .types.dlp import Location +from .types.dlp import Manual +from .types.dlp import MetadataLocation +from .types.dlp import OtherInfoTypeSummary +from .types.dlp import OutputStorageConfig +from .types.dlp import PrimitiveTransformation +from .types.dlp import PrivacyMetric +from .types.dlp import ProfileStatus +from .types.dlp import QuasiId +from .types.dlp import QuoteInfo +from .types.dlp import Range +from .types.dlp import RecordCondition +from .types.dlp import RecordLocation +from .types.dlp import RecordSuppression +from .types.dlp import RecordTransformation +from .types.dlp import RecordTransformations +from .types.dlp import RedactConfig +from .types.dlp import RedactImageRequest +from .types.dlp import RedactImageResponse +from .types.dlp import 
ReidentifyContentRequest +from .types.dlp import ReidentifyContentResponse +from .types.dlp import ReplaceDictionaryConfig +from .types.dlp import ReplaceValueConfig +from .types.dlp import ReplaceWithInfoTypeConfig +from .types.dlp import RiskAnalysisJobConfig +from .types.dlp import Schedule +from .types.dlp import StatisticalTable +from .types.dlp import StorageMetadataLabel +from .types.dlp import StoredInfoType +from .types.dlp import StoredInfoTypeConfig +from .types.dlp import StoredInfoTypeStats +from .types.dlp import StoredInfoTypeVersion +from .types.dlp import Table +from .types.dlp import TableDataProfile +from .types.dlp import TableLocation +from .types.dlp import TimePartConfig +from .types.dlp import TransformationConfig +from .types.dlp import TransformationDescription +from .types.dlp import TransformationDetails +from .types.dlp import TransformationDetailsStorageConfig +from .types.dlp import TransformationErrorHandling +from .types.dlp import TransformationLocation +from .types.dlp import TransformationOverview +from .types.dlp import TransformationResultStatus +from .types.dlp import TransformationSummary +from .types.dlp import TransientCryptoKey +from .types.dlp import UnwrappedCryptoKey +from .types.dlp import UpdateDeidentifyTemplateRequest +from .types.dlp import UpdateInspectTemplateRequest +from .types.dlp import UpdateJobTriggerRequest +from .types.dlp import UpdateStoredInfoTypeRequest +from .types.dlp import Value +from .types.dlp import ValueFrequency +from .types.dlp import VersionDescription +from .types.dlp import ContentOption +from .types.dlp import DlpJobType +from .types.dlp import EncryptionStatus +from .types.dlp import InfoTypeSupportedBy +from .types.dlp import MatchingType +from .types.dlp import MetadataType +from .types.dlp import RelationalOperator +from .types.dlp import ResourceVisibility +from .types.dlp import StoredInfoTypeState +from .types.dlp import TransformationContainerType +from .types.dlp import 
TransformationResultStatusType +from .types.dlp import TransformationType +from .types.storage import BigQueryField +from .types.storage import BigQueryKey +from .types.storage import BigQueryOptions +from .types.storage import BigQueryTable +from .types.storage import CloudStorageFileSet +from .types.storage import CloudStorageOptions +from .types.storage import CloudStoragePath +from .types.storage import CloudStorageRegexFileSet +from .types.storage import CustomInfoType +from .types.storage import DatastoreKey +from .types.storage import DatastoreOptions +from .types.storage import EntityId +from .types.storage import FieldId +from .types.storage import HybridOptions +from .types.storage import InfoType +from .types.storage import Key +from .types.storage import KindExpression +from .types.storage import PartitionId +from .types.storage import RecordKey +from .types.storage import SensitivityScore +from .types.storage import StorageConfig +from .types.storage import StoredType +from .types.storage import TableOptions +from .types.storage import FileType +from .types.storage import Likelihood + +__all__ = ( + 'DlpServiceAsyncClient', +'Action', +'ActivateJobTriggerRequest', +'AnalyzeDataSourceRiskDetails', +'BigQueryField', +'BigQueryKey', +'BigQueryOptions', +'BigQueryTable', +'BoundingBox', +'BucketingConfig', +'ByteContentItem', +'CancelDlpJobRequest', +'CharacterMaskConfig', +'CharsToIgnore', +'CloudStorageFileSet', +'CloudStorageOptions', +'CloudStoragePath', +'CloudStorageRegexFileSet', +'Color', +'Container', +'ContentItem', +'ContentLocation', +'ContentOption', +'CreateDeidentifyTemplateRequest', +'CreateDlpJobRequest', +'CreateInspectTemplateRequest', +'CreateJobTriggerRequest', +'CreateStoredInfoTypeRequest', +'CryptoDeterministicConfig', +'CryptoHashConfig', +'CryptoKey', +'CryptoReplaceFfxFpeConfig', +'CustomInfoType', +'DataProfileAction', +'DataProfileConfigSnapshot', +'DataProfileJobConfig', +'DataProfileLocation', +'DataProfilePubSubCondition', 
+'DataProfilePubSubMessage', +'DataRiskLevel', +'DatastoreKey', +'DatastoreOptions', +'DateShiftConfig', +'DateTime', +'DeidentifyConfig', +'DeidentifyContentRequest', +'DeidentifyContentResponse', +'DeidentifyTemplate', +'DeleteDeidentifyTemplateRequest', +'DeleteDlpJobRequest', +'DeleteInspectTemplateRequest', +'DeleteJobTriggerRequest', +'DeleteStoredInfoTypeRequest', +'DlpJob', +'DlpJobType', +'DlpServiceClient', +'DocumentLocation', +'EncryptionStatus', +'EntityId', +'Error', +'ExcludeByHotword', +'ExcludeInfoTypes', +'ExclusionRule', +'FieldId', +'FieldTransformation', +'FileType', +'Finding', +'FinishDlpJobRequest', +'FixedSizeBucketingConfig', +'GetDeidentifyTemplateRequest', +'GetDlpJobRequest', +'GetInspectTemplateRequest', +'GetJobTriggerRequest', +'GetStoredInfoTypeRequest', +'HybridContentItem', +'HybridFindingDetails', +'HybridInspectDlpJobRequest', +'HybridInspectJobTriggerRequest', +'HybridInspectResponse', +'HybridInspectStatistics', +'HybridOptions', +'ImageLocation', +'ImageTransformations', +'InfoType', +'InfoTypeCategory', +'InfoTypeDescription', +'InfoTypeStats', +'InfoTypeSummary', +'InfoTypeSupportedBy', +'InfoTypeTransformations', +'InspectConfig', +'InspectContentRequest', +'InspectContentResponse', +'InspectDataSourceDetails', +'InspectJobConfig', +'InspectResult', +'InspectTemplate', +'InspectionRule', +'InspectionRuleSet', +'JobTrigger', +'Key', +'KindExpression', +'KmsWrappedCryptoKey', +'LargeCustomDictionaryConfig', +'LargeCustomDictionaryStats', +'Likelihood', +'ListDeidentifyTemplatesRequest', +'ListDeidentifyTemplatesResponse', +'ListDlpJobsRequest', +'ListDlpJobsResponse', +'ListInfoTypesRequest', +'ListInfoTypesResponse', +'ListInspectTemplatesRequest', +'ListInspectTemplatesResponse', +'ListJobTriggersRequest', +'ListJobTriggersResponse', +'ListStoredInfoTypesRequest', +'ListStoredInfoTypesResponse', +'Location', +'Manual', +'MatchingType', +'MetadataLocation', +'MetadataType', +'OtherInfoTypeSummary', +'OutputStorageConfig', 
+'PartitionId', +'PrimitiveTransformation', +'PrivacyMetric', +'ProfileStatus', +'QuasiId', +'QuoteInfo', +'Range', +'RecordCondition', +'RecordKey', +'RecordLocation', +'RecordSuppression', +'RecordTransformation', +'RecordTransformations', +'RedactConfig', +'RedactImageRequest', +'RedactImageResponse', +'ReidentifyContentRequest', +'ReidentifyContentResponse', +'RelationalOperator', +'ReplaceDictionaryConfig', +'ReplaceValueConfig', +'ReplaceWithInfoTypeConfig', +'ResourceVisibility', +'RiskAnalysisJobConfig', +'Schedule', +'SensitivityScore', +'StatisticalTable', +'StorageConfig', +'StorageMetadataLabel', +'StoredInfoType', +'StoredInfoTypeConfig', +'StoredInfoTypeState', +'StoredInfoTypeStats', +'StoredInfoTypeVersion', +'StoredType', +'Table', +'TableDataProfile', +'TableLocation', +'TableOptions', +'TimePartConfig', +'TransformationConfig', +'TransformationContainerType', +'TransformationDescription', +'TransformationDetails', +'TransformationDetailsStorageConfig', +'TransformationErrorHandling', +'TransformationLocation', +'TransformationOverview', +'TransformationResultStatus', +'TransformationResultStatusType', +'TransformationSummary', +'TransformationType', +'TransientCryptoKey', +'UnwrappedCryptoKey', +'UpdateDeidentifyTemplateRequest', +'UpdateInspectTemplateRequest', +'UpdateJobTriggerRequest', +'UpdateStoredInfoTypeRequest', +'Value', +'ValueFrequency', +'VersionDescription', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json new file mode 100644 index 00000000..634002d4 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json @@ -0,0 +1,538 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dlp_v2", + "protoPackage": "google.privacy.dlp.v2", + "schema": "1.0", + "services": { + "DlpService": { + "clients": { + "grpc": { + 
"libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + 
"list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DlpServiceAsyncClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + 
"get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "rest": { + "libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + 
"methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ 
b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py new file mode 100644 index 00000000..e8e1c384 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py new file mode 100644 index 00000000..aa9c062a --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DlpServiceClient +from .async_client import DlpServiceAsyncClient + +__all__ = ( + 'DlpServiceClient', + 'DlpServiceAsyncClient', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py new file mode 100644 index 00000000..041479c1 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -0,0 +1,4143 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dlp_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .client import DlpServiceClient + + +class DlpServiceAsyncClient: + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. 
+ """ + + _client: DlpServiceClient + + DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT + + deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) + parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) + dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) + parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) + dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) + parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) + finding_path = staticmethod(DlpServiceClient.finding_path) + parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) + inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) + parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) + job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) + parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) + stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) + parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) + common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DlpServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DlpServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DlpServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DlpServiceClient.common_project_path) + parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) + common_location_path = 
staticmethod(DlpServiceClient.common_location_path) + parse_common_location_path = staticmethod(DlpServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DlpServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(DlpServiceClient).get_transport_class, type(DlpServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DlpServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. 
+ client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DlpServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def inspect_content(self, + request: Optional[Union[dlp.InspectContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated.
+ For how-to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = await client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.inspect_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def redact_image(self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = await client.redact_image(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]]): + The request object. Request to search for potentially + sensitive info in an image and redact it by covering it + with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.redact_image, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def deidentify_content(self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = await client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]]): + The request object. Request to de-identify a + ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.deidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reidentify_content(self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]]): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_info_types(self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = await client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]]): + The request object. Request for the list of infoTypes. + parent (:class:`str`): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_inspect_template(self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]]): + The request object. Request message for + CreateInspectTemplate. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_inspect_template(self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]]): + The request object. Request message for + UpdateInspectTemplate. + name (:class:`str`): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_inspect_template(self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]]): + The request object. Request message for + GetInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_inspect_templates(self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesAsyncPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]]): + The request object. Request message for + ListInspectTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListInspectTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_inspect_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListInspectTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_inspect_template(self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_inspect_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]]): + The request object. Request message for + DeleteInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_deidentify_template(self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]]): + The request object. Request message for + CreateDeidentifyTemplate. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_deidentify_template(self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deidentify_template(self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. 
+ See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]]): + The request object. Request message for + GetDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_deidentify_templates(self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesAsyncPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]]): + The request object. Request message for + ListDeidentifyTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListDeidentifyTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deidentify_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListDeidentifyTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_deidentify_template(self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_deidentify_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]]): + The request object. Request message for + DeleteDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_job_trigger(self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = await client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]]): + The request object. Request message for + CreateJobTrigger. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_job_trigger(self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]]): + The request object. Request message for + UpdateJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def hybrid_inspect_job_trigger(self, + request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.HybridInspectJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_trigger(self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]]): + The request object. Request message for GetJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_job_triggers(self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersAsyncPager: + r"""Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]]): + The request object. Request message for ListJobTriggers. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager: + Response message for ListJobTriggers. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListJobTriggersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_triggers, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTriggersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job_trigger(self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. 
+ See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_trigger(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]]): + The request object. Request message for + DeleteJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def activate_job_trigger(self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.activate_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]]): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.activate_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_dlp_job(self, + request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_job: Optional[dlp.InspectJobConfig] = None, + risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]]): + The request object. Request message for + CreateDlpJobRequest. Used to initiate long running jobs + such as calculating risk metrics or inspecting Google + Cloud Storage. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_dlp_jobs(self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsAsyncPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]]): + The request object. The request message for listing DLP + jobs. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListDlpJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_dlp_jobs, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDlpJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_dlp_job(self, + request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]]): + The request object. The request message for + [DlpJobs.GetDlpJob][]. + name (:class:`str`): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dlp_job(self, + request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. 
+ See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]]): + The request object. The request message for deleting a + DLP job. + name (:class:`str`): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_dlp_job(self, + request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]]): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_stored_info_type(self, + request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]]): + The request object. Request message for + CreateStoredInfoType. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): + Required. Configuration of the + storedInfoType to create. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_stored_info_type, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_stored_info_type(self, + request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.update_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]]): + The request object. Request message for + UpdateStoredInfoType. + name (:class:`str`): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_stored_info_type, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_stored_info_type(self, + request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. 
+ See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]]): + The request object. Request message for + GetStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_stored_info_types(self, + request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListStoredInfoTypesAsyncPager: + r"""Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]]): + The request object. Request message for + ListStoredInfoTypes. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListStoredInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_stored_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListStoredInfoTypesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_stored_info_type(self, + request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_stored_info_type(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]]): + The request object. Request message for + DeleteStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def hybrid_inspect_dlp_job(self, + request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.HybridInspectDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def finish_dlp_job(self, + request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.finish_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]]): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.finish_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DlpServiceAsyncClient", +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py new file mode 100644 index 00000000..9d90a824 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py @@ -0,0 +1,4269 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.dlp_v2 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DlpServiceGrpcTransport +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .transports.rest import DlpServiceRestTransport + + +class DlpServiceClientMeta(type): + """Metaclass for the DlpService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] + _transport_registry["grpc"] = DlpServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DlpServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DlpServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DlpServiceClient(metaclass=DlpServiceClientMeta): + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dlp.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def deidentify_template_path(organization: str,deidentify_template: str,) -> str: + """Returns a fully-qualified deidentify_template string.""" + return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) + + @staticmethod + def parse_deidentify_template_path(path: str) -> Dict[str,str]: + """Parses a deidentify_template path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def dlp_content_path(project: str,) -> str: + """Returns a fully-qualified dlp_content string.""" + return "projects/{project}/dlpContent".format(project=project, ) + + @staticmethod + def parse_dlp_content_path(path: str) -> Dict[str,str]: + """Parses a dlp_content path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpContent$", path) + return m.groupdict() if m else {} + + @staticmethod + def dlp_job_path(project: str,dlp_job: str,) -> str: + """Returns a fully-qualified dlp_job string.""" + return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) + + @staticmethod + def parse_dlp_job_path(path: str) -> Dict[str,str]: + """Parses a dlp_job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def finding_path(project: str,location: str,finding: str,) -> str: + """Returns a fully-qualified finding string.""" + return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) + + @staticmethod + def parse_finding_path(path: str) -> Dict[str,str]: + """Parses a finding path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", path) + return m.groupdict() if m else {} + + 
@staticmethod + def inspect_template_path(organization: str,inspect_template: str,) -> str: + """Returns a fully-qualified inspect_template string.""" + return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) + + @staticmethod + def parse_inspect_template_path(path: str) -> Dict[str,str]: + """Parses a inspect_template path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def job_trigger_path(project: str,job_trigger: str,) -> str: + """Returns a fully-qualified job_trigger string.""" + return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) + + @staticmethod + def parse_job_trigger_path(path: str) -> Dict[str,str]: + """Parses a job_trigger path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def stored_info_type_path(organization: str,stored_info_type: str,) -> str: + """Returns a fully-qualified stored_info_type string.""" + return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) + + @staticmethod + def parse_stored_info_type_path(path: str) -> Dict[str,str]: + """Parses a stored_info_type path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/storedInfoTypes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = 
re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DlpServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DlpServiceTransport): + # transport is a DlpServiceTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def inspect_content(self, + request: Optional[Union[dlp.InspectContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.InspectContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.InspectContentRequest): + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.inspect_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def redact_image(self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = client.redact_image(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]): + The request object. Request to search for potentially + sensitive info in an image and redact it by covering it + with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.RedactImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.RedactImageRequest): + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.redact_image] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def deidentify_content(self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]): + The request object. Request to de-identify a + ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeidentifyContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeidentifyContentRequest): + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.deidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def reidentify_content(self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ReidentifyContentRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ReidentifyContentRequest): + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_info_types(self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]): + The request object. Request for the list of infoTypes. + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.ListInfoTypesRequest): + request = dlp.ListInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_info_types] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_inspect_template(self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]): + The request object. Request message for + CreateInspectTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateInspectTemplateRequest): + request = dlp.CreateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_inspect_template(self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]): + The request object. Request message for + UpdateInspectTemplate. + name (str): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateInspectTemplateRequest): + request = dlp.UpdateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_inspect_template(self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]): + The request object. Request message for + GetInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetInspectTemplateRequest): + request = dlp.GetInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_inspect_templates(self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]): + The request object. Request message for + ListInspectTemplates. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInspectTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListInspectTemplatesRequest): + request = dlp.ListInspectTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInspectTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_inspect_template(self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_inspect_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]): + The request object. Request message for + DeleteInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteInspectTemplateRequest): + request = dlp.DeleteInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_deidentify_template(self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]): + The request object. Request message for + CreateDeidentifyTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): + request = dlp.CreateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_deidentify_template(self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (str): + Required. 
Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): + request = dlp.UpdateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deidentify_template(self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]): + The request object. Request message for + GetDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDeidentifyTemplateRequest): + request = dlp.GetDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_deidentify_templates(self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]): + The request object. Request message for + ListDeidentifyTemplates. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDeidentifyTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): + request = dlp.ListDeidentifyTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeidentifyTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_deidentify_template(self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_deidentify_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]): + The request object. Request message for + DeleteDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): + request = dlp.DeleteDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_job_trigger(self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]): + The request object. Request message for + CreateJobTrigger. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateJobTriggerRequest): + request = dlp.CreateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_job_trigger(self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]): + The request object. Request message for + UpdateJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateJobTriggerRequest): + request = dlp.UpdateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def hybrid_inspect_job_trigger(self, + request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectJobTriggerRequest): + request = dlp.HybridInspectJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_job_trigger(self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]): + The request object. Request message for GetJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. 
See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetJobTriggerRequest): + request = dlp.GetJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_job_triggers(self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersPager: + r"""Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]): + The request object. Request message for ListJobTriggers. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: + Response message for ListJobTriggers. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListJobTriggersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListJobTriggersRequest): + request = dlp.ListJobTriggersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobTriggersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_job_trigger(self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + client.delete_job_trigger(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]): + The request object. Request message for + DeleteJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteJobTriggerRequest): + request = dlp.DeleteJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def activate_job_trigger(self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.activate_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ActivateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ActivateJobTriggerRequest): + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_dlp_job(self, + request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_job: Optional[dlp.InspectJobConfig] = None, + risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]): + The request object. Request message for + CreateDlpJobRequest. Used to initiate long running jobs + such as calculating risk metrics or inspecting Google + Cloud Storage. + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDlpJobRequest): + request = dlp.CreateDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_dlp_jobs(self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]): + The request object. The request message for listing DLP + jobs. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: + The response message for listing DLP + jobs. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDlpJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDlpJobsRequest): + request = dlp.ListDlpJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDlpJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_dlp_job(self, + request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]): + The request object. The request message for + [DlpJobs.GetDlpJob][]. + name (str): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDlpJobRequest): + request = dlp.GetDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_dlp_job(self, + request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + client.delete_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]): + The request object. The request message for deleting a + DLP job. + name (str): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.DeleteDlpJobRequest): + request = dlp.DeleteDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_dlp_job(self, + request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CancelDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CancelDlpJobRequest): + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_stored_info_type(self, + request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]): + The request object. Request message for + CreateStoredInfoType. + parent (str): + Required. Parent resource name. 
+
+ The format of this value varies depending on the scope
+ of the request (project or organization) and whether you
+ have `specified a processing
+ location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+ - Projects scope, location specified:
+ ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+ - Projects scope, no location specified (defaults to
+ global): ``projects/``\ PROJECT_ID
+ - Organizations scope, location specified:
+ ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+ - Organizations scope, no location specified (defaults
+ to global): ``organizations/``\ ORG_ID
+
+ The following example ``parent`` string specifies a
+ parent project with the identifier ``example-project``,
+ and specifies the ``europe-west3`` location for
+ processing data:
+
+ ::
+
+ parent=projects/example-project/locations/europe-west3
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ config (google.cloud.dlp_v2.types.StoredInfoTypeConfig):
+ Required. Configuration of the
+ storedInfoType to create.
+
+ This corresponds to the ``config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.dlp_v2.types.StoredInfoType:
+ StoredInfoType resource message that
+ contains information about the current
+ version and any pending updates.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateStoredInfoTypeRequest): + request = dlp.CreateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_stored_info_type(self, + request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.update_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]): + The request object. Request message for + UpdateStoredInfoType. + name (str): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): + request = dlp.UpdateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_stored_info_type(self, + request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]): + The request object. Request message for + GetStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetStoredInfoTypeRequest): + request = dlp.GetStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_stored_info_types(self, + request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListStoredInfoTypesPager: + r"""Lists stored infoTypes. 
+ See
+ https://cloud.google.com/dlp/docs/creating-stored-infotypes
+ to learn more.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dlp_v2
+
+ def sample_list_stored_info_types():
+ # Create a client
+ client = dlp_v2.DlpServiceClient()
+
+ # Initialize request argument(s)
+ request = dlp_v2.ListStoredInfoTypesRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_stored_info_types(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]):
+ The request object. Request message for
+ ListStoredInfoTypes.
+ parent (str):
+ Required. Parent resource name.
+
+ The format of this value varies depending on the scope
+ of the request (project or organization) and whether you
+ have `specified a processing
+ location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+ - Projects scope, location specified:
+ ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+ - Projects scope, no location specified (defaults to
+ global): ``projects/``\ PROJECT_ID
+
+ The following example ``parent`` string specifies a
+ parent project with the identifier ``example-project``,
+ and specifies the ``europe-west3`` location for
+ processing data:
+
+ ::
+
+ parent=projects/example-project/locations/europe-west3
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListStoredInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListStoredInfoTypesRequest): + request = dlp.ListStoredInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListStoredInfoTypesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_stored_info_type(self, + request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_stored_info_type(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]): + The request object. Request message for + DeleteStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): + request = dlp.DeleteStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def hybrid_inspect_dlp_job(self, + request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectDlpJobRequest): + request = dlp.HybridInspectDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def finish_dlp_job(self, + request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + client.finish_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.FinishDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.FinishDlpJobRequest): + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "DlpServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DlpServiceClient", +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py new file mode 100644 index 00000000..73a0e48f --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py @@ -0,0 +1,623 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.dlp_v2.types import dlp + + +class ListInspectTemplatesPager: + """A pager for iterating through ``list_inspect_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``inspect_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInspectTemplates`` requests and continue to iterate + through the ``inspect_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListInspectTemplatesResponse], + request: dlp.ListInspectTemplatesRequest, + response: dlp.ListInspectTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.InspectTemplate]: + for page in self.pages: + yield from page.inspect_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInspectTemplatesAsyncPager: + """A pager for iterating through ``list_inspect_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``inspect_templates`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListInspectTemplates`` requests and continue to iterate + through the ``inspect_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], + request: dlp.ListInspectTemplatesRequest, + response: dlp.ListInspectTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.InspectTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.inspect_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListDeidentifyTemplatesResponse], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DeidentifyTemplate]: + for page in self.pages: + yield from page.deidentify_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesAsyncPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.DeidentifyTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.deidentify_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_triggers`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListJobTriggersResponse], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.JobTrigger]: + for page in self.pages: + yield from page.job_triggers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersAsyncPager: + """A pager for iterating through ``list_job_triggers`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_triggers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.JobTrigger]: + async def async_generator(): + async for page in self.pages: + for response in page.job_triggers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListDlpJobsResponse], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. 
+ response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DlpJob]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsAsyncPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.DlpJob]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.ListStoredInfoTypesResponse], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.StoredInfoType]: + for page in self.pages: + yield from page.stored_info_types + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesAsyncPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.StoredInfoType]: + async def async_generator(): + async for page in self.pages: + for response in page.stored_info_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py new file mode 100644 index 00000000..df9b4279 --- /dev/null 
+++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DlpServiceTransport +from .grpc import DlpServiceGrpcTransport +from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .rest import DlpServiceRestTransport +from .rest import DlpServiceRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] +_transport_registry['grpc'] = DlpServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport +_transport_registry['rest'] = DlpServiceRestTransport + +__all__ = ( + 'DlpServiceTransport', + 'DlpServiceGrpcTransport', + 'DlpServiceGrpcAsyncIOTransport', + 'DlpServiceRestTransport', + 'DlpServiceRestInterceptor', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py new file mode 100644 index 00000000..e90545e1 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -0,0 +1,752 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dlp_v2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DlpServiceTransport(abc.ABC): + """Abstract transport class for DlpService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dlp.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.inspect_content: gapic_v1.method.wrap_method( + self.inspect_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.redact_image: gapic_v1.method.wrap_method( + self.redact_image, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.deidentify_content: gapic_v1.method.wrap_method( + self.deidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.reidentify_content: gapic_v1.method.wrap_method( + self.reidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_info_types: gapic_v1.method.wrap_method( + self.list_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_inspect_template: gapic_v1.method.wrap_method( + self.create_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_inspect_template: gapic_v1.method.wrap_method( + 
self.update_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_inspect_template: gapic_v1.method.wrap_method( + self.get_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_inspect_templates: gapic_v1.method.wrap_method( + self.list_inspect_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_inspect_template: gapic_v1.method.wrap_method( + self.delete_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_deidentify_template: gapic_v1.method.wrap_method( + self.create_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_deidentify_template: gapic_v1.method.wrap_method( + self.update_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_deidentify_template: gapic_v1.method.wrap_method( + self.get_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_deidentify_templates: gapic_v1.method.wrap_method( + self.list_deidentify_templates, + default_retry=retries.Retry( 
+initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_deidentify_template: gapic_v1.method.wrap_method( + self.delete_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_job_trigger: gapic_v1.method.wrap_method( + self.create_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.update_job_trigger: gapic_v1.method.wrap_method( + self.update_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( + self.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.get_job_trigger: gapic_v1.method.wrap_method( + self.get_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_job_triggers: gapic_v1.method.wrap_method( + self.list_job_triggers, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_job_trigger: gapic_v1.method.wrap_method( + self.delete_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.activate_job_trigger: gapic_v1.method.wrap_method( + self.activate_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.create_dlp_job: gapic_v1.method.wrap_method( + self.create_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.list_dlp_jobs: gapic_v1.method.wrap_method( + self.list_dlp_jobs, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_dlp_job: gapic_v1.method.wrap_method( + self.get_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_dlp_job: gapic_v1.method.wrap_method( + self.delete_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.cancel_dlp_job: gapic_v1.method.wrap_method( + self.cancel_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.create_stored_info_type: gapic_v1.method.wrap_method( + self.create_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.update_stored_info_type: gapic_v1.method.wrap_method( + self.update_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.get_stored_info_type: gapic_v1.method.wrap_method( + self.get_stored_info_type, + default_retry=retries.Retry( 
+initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_stored_info_types: gapic_v1.method.wrap_method( + self.list_stored_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_stored_info_type: gapic_v1.method.wrap_method( + self.delete_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( + self.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.finish_dlp_job: gapic_v1.method.wrap_method( + self.finish_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Union[ + dlp.InspectContentResponse, + Awaitable[dlp.InspectContentResponse] + ]]: + raise NotImplementedError() + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + Union[ + dlp.RedactImageResponse, + Awaitable[dlp.RedactImageResponse] + ]]: + raise NotImplementedError() + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + Union[ + dlp.DeidentifyContentResponse, + Awaitable[dlp.DeidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Union[ + dlp.ReidentifyContentResponse, + Awaitable[dlp.ReidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Union[ + dlp.ListInfoTypesResponse, + Awaitable[dlp.ListInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Union[ + dlp.ListInspectTemplatesResponse, + Awaitable[dlp.ListInspectTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Union[ + 
empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Union[ + dlp.ListDeidentifyTemplatesResponse, + Awaitable[dlp.ListDeidentifyTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def list_job_triggers(self) -> Callable[ + 
[dlp.ListJobTriggersRequest], + Union[ + dlp.ListJobTriggersResponse, + Awaitable[dlp.ListJobTriggersResponse] + ]]: + raise NotImplementedError() + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + Union[ + dlp.ListDlpJobsResponse, + Awaitable[dlp.ListDlpJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def 
list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Union[ + dlp.ListStoredInfoTypesResponse, + Awaitable[dlp.ListStoredInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DlpServiceTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py new file mode 100644 index 00000000..d95be0ba --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -0,0 +1,1262 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO + + +class DlpServiceGrpcTransport(DlpServiceTransport): + """gRPC backend transport for DlpService. + + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + dlp.InspectContentResponse]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. 
+ For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + Returns: + Callable[[~.InspectContentRequest], + ~.InspectContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'inspect_content' not in self._stubs: + self._stubs['inspect_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/InspectContent', + request_serializer=dlp.InspectContentRequest.serialize, + response_deserializer=dlp.InspectContentResponse.deserialize, + ) + return self._stubs['inspect_content'] + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + dlp.RedactImageResponse]: + r"""Return a callable for the redact image method over gRPC. + + Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.RedactImageRequest], + ~.RedactImageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'redact_image' not in self._stubs: + self._stubs['redact_image'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/RedactImage', + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs['redact_image'] + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + dlp.DeidentifyContentResponse]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.DeidentifyContentRequest], + ~.DeidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + dlp.ReidentifyContentResponse]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. 
See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + ~.ReidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + dlp.ListInfoTypesResponse]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + ~.ListInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + dlp.ListInspectTemplatesResponse]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + ~.ListInspectTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. 
+ + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + dlp.ListDeidentifyTemplatesResponse]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + ~.ListDeidentifyTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. 
+ + Returns: + Callable[[~.CreateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.UpdateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. 
To review the findings monitor the jobs + within the trigger. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_job_trigger' not in self._stubs: + self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_job_trigger'] + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_trigger' not in self._stubs: + self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetJobTrigger', + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['get_job_trigger'] + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + dlp.ListJobTriggersResponse]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. 
+ See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + ~.ListJobTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_triggers' not in self._stubs: + self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListJobTriggers', + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs['list_job_triggers'] + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_trigger' not in self._stubs: + self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_trigger'] + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + dlp.DlpJob]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. 
Causes the immediate execution
+        of a trigger instead of waiting on the trigger event to
+        occur.
+
+        Returns:
+            Callable[[~.ActivateJobTriggerRequest],
+                    ~.DlpJob]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'activate_job_trigger' not in self._stubs:
+            self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary(
+                '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger',
+                request_serializer=dlp.ActivateJobTriggerRequest.serialize,
+                response_deserializer=dlp.DlpJob.deserialize,
+            )
+        return self._stubs['activate_job_trigger']
+
+    @property
+    def create_dlp_job(self) -> Callable[
+            [dlp.CreateDlpJobRequest],
+            dlp.DlpJob]:
+        r"""Return a callable for the create dlp job method over gRPC.
+
+        Creates a new job to inspect storage or calculate
+        risk metrics. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+        When no InfoTypes or CustomInfoTypes are specified in
+        inspect jobs, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.CreateDlpJobRequest],
+                    ~.DlpJob]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if 'create_dlp_job' not in self._stubs: + self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDlpJob', + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['create_dlp_job'] + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + dlp.ListDlpJobsResponse]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + ~.ListDlpJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_dlp_jobs' not in self._stubs: + self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDlpJobs', + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs['list_dlp_jobs'] + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + dlp.DlpJob]: + r"""Return a callable for the get dlp job method over gRPC. + + Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. 
+ + Returns: + Callable[[~.CancelDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. 
The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + dlp.ListStoredInfoTypesResponse]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + ~.ListStoredInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DlpServiceGrpcTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..03c8bf3c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -0,0 +1,1261 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DlpServiceGrpcTransport + + +class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): + """gRPC AsyncIO backend transport for DlpService. + + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Awaitable[dlp.InspectContentResponse]]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. 
+ This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + Returns: + Callable[[~.InspectContentRequest], + Awaitable[~.InspectContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'inspect_content' not in self._stubs: + self._stubs['inspect_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/InspectContent', + request_serializer=dlp.InspectContentRequest.serialize, + response_deserializer=dlp.InspectContentResponse.deserialize, + ) + return self._stubs['inspect_content'] + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + Awaitable[dlp.RedactImageResponse]]: + r"""Return a callable for the redact image method over gRPC. + + Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.RedactImageRequest], + Awaitable[~.RedactImageResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'redact_image' not in self._stubs: + self._stubs['redact_image'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/RedactImage', + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs['redact_image'] + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + Awaitable[dlp.DeidentifyContentResponse]]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.DeidentifyContentRequest], + Awaitable[~.DeidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Awaitable[dlp.ReidentifyContentResponse]]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + Awaitable[~.ReidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Awaitable[dlp.ListInfoTypesResponse]]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + Awaitable[~.ListInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. 
+ + Returns: + Callable[[~.UpdateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Awaitable[dlp.ListInspectTemplatesResponse]]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. 
+ See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + Awaitable[~.ListInspectTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. 
+ + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Awaitable[dlp.ListDeidentifyTemplatesResponse]]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + Awaitable[~.ListDeidentifyTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.CreateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. 
+ + Returns: + Callable[[~.UpdateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'hybrid_inspect_job_trigger' not in self._stubs: + self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_job_trigger'] + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_trigger' not in self._stubs: + self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetJobTrigger', + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['get_job_trigger'] + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + Awaitable[dlp.ListJobTriggersResponse]]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + Awaitable[~.ListJobTriggersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_triggers' not in self._stubs: + self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListJobTriggers', + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs['list_job_triggers'] + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_trigger' not in self._stubs: + self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_trigger'] + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + Returns: + Callable[[~.ActivateJobTriggerRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'activate_job_trigger' not in self._stubs: + self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', + request_serializer=dlp.ActivateJobTriggerRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['activate_job_trigger'] + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the create dlp job method over gRPC. + + Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.CreateDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_dlp_job' not in self._stubs: + self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDlpJob', + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['create_dlp_job'] + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + Awaitable[dlp.ListDlpJobsResponse]]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. 
See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + Awaitable[~.ListDlpJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_dlp_jobs' not in self._stubs: + self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDlpJobs', + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs['list_dlp_jobs'] + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the get dlp job method over gRPC. + + Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete dlp job method over gRPC. 
+ + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Awaitable[dlp.ListStoredInfoTypesResponse]]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. 
+ + Returns: + Callable[[~.ListStoredInfoTypesRequest], + Awaitable[~.ListStoredInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. 
+ + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'DlpServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py new file mode 100644 index 00000000..789b6267 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -0,0 +1,4325 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore + +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DlpServiceRestInterceptor: + """Interceptor for DlpService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DlpServiceRestTransport. + + .. 
code-block:: python + class MyCustomDlpServiceInterceptor(DlpServiceRestInterceptor): + def pre_activate_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_activate_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_cancel_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_create_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_deidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_delete_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_finish_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return 
response + + def pre_hybrid_inspect_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_hybrid_inspect_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_inspect_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_inspect_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_deidentify_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deidentify_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_dlp_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_dlp_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_inspect_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_inspect_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_job_triggers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_job_triggers(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_list_stored_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_stored_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_redact_image(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_redact_image(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DlpServiceRestTransport(interceptor=MyCustomDlpServiceInterceptor()) + client = DlpServiceClient(transport=transport) + + + """ + def pre_activate_job_trigger(self, request: dlp.ActivateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for activate_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for activate_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_cancel_dlp_job(self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_create_deidentify_template(self, request: dlp.CreateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for create_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_create_dlp_job(self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for create_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_create_inspect_template(self, request: dlp.CreateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for create_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_create_job_trigger(self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for create_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_create_stored_info_type(self, request: dlp.CreateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for create_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_deidentify_content(self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for deidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_deidentify_content(self, response: dlp.DeidentifyContentResponse) -> dlp.DeidentifyContentResponse: + """Post-rpc interceptor for deidentify_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_delete_deidentify_template(self, request: dlp.DeleteDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_delete_dlp_job(self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_inspect_template(self, request: dlp.DeleteInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_job_trigger(self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_stored_info_type(self, request: dlp.DeleteStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_finish_dlp_job(self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for finish_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_get_deidentify_template(self, request: dlp.GetDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for get_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_dlp_job(self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for get_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_inspect_template(self, request: dlp.GetInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_get_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for get_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_job_trigger(self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for get_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_get_stored_info_type(self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for get_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_hybrid_inspect_dlp_job(self, request: dlp.HybridInspectDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_hybrid_inspect_dlp_job(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_hybrid_inspect_job_trigger(self, request: dlp.HybridInspectJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_hybrid_inspect_job_trigger(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_inspect_content(self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for inspect_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_inspect_content(self, response: dlp.InspectContentResponse) -> dlp.InspectContentResponse: + """Post-rpc interceptor for inspect_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_deidentify_templates(self, request: dlp.ListDeidentifyTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deidentify_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_deidentify_templates(self, response: dlp.ListDeidentifyTemplatesResponse) -> dlp.ListDeidentifyTemplatesResponse: + """Post-rpc interceptor for list_deidentify_templates + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_dlp_jobs(self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_dlp_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_dlp_jobs(self, response: dlp.ListDlpJobsResponse) -> dlp.ListDlpJobsResponse: + """Post-rpc interceptor for list_dlp_jobs + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_list_info_types(self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_info_types(self, response: dlp.ListInfoTypesResponse) -> dlp.ListInfoTypesResponse: + """Post-rpc interceptor for list_info_types + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_inspect_templates(self, request: dlp.ListInspectTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_inspect_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_inspect_templates(self, response: dlp.ListInspectTemplatesResponse) -> dlp.ListInspectTemplatesResponse: + """Post-rpc interceptor for list_inspect_templates + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_job_triggers(self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_job_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_list_job_triggers(self, response: dlp.ListJobTriggersResponse) -> dlp.ListJobTriggersResponse: + """Post-rpc interceptor for list_job_triggers + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_list_stored_info_types(self, request: dlp.ListStoredInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_stored_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_stored_info_types(self, response: dlp.ListStoredInfoTypesResponse) -> dlp.ListStoredInfoTypesResponse: + """Post-rpc interceptor for list_stored_info_types + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_redact_image(self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for redact_image + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_redact_image(self, response: dlp.RedactImageResponse) -> dlp.RedactImageResponse: + """Post-rpc interceptor for redact_image + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + def pre_reidentify_content(self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_reidentify_content(self, response: dlp.ReidentifyContentResponse) -> dlp.ReidentifyContentResponse: + """Post-rpc interceptor for reidentify_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_deidentify_template(self, request: dlp.UpdateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for update_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_inspect_template(self, request: dlp.UpdateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_update_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for update_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_job_trigger(self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for update_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + def pre_update_stored_info_type(self, request: dlp.UpdateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for update_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DlpServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DlpServiceRestInterceptor + + +class DlpServiceRestTransport(DlpServiceTransport): + """REST backend transport for DlpService. 
+ + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[DlpServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST)
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or DlpServiceRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _ActivateJobTrigger(DlpServiceRestStub):
+ def __hash__(self):
+ return hash("ActivateJobTrigger")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls,
message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ActivateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DlpJob: + r"""Call the activate job trigger method over HTTP. + + Args: + request (~.dlp.ActivateJobTriggerRequest): + The request object. Request message for + ActivateJobTrigger. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/jobTriggers/*}:activate', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:activate', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_activate_job_trigger(request, metadata) + pb_request = dlp.ActivateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = 
getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_activate_job_trigger(resp) + return resp + + class _CancelDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("CancelDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CancelDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the cancel dlp job method over HTTP. + + Args: + request (~.dlp.CancelDlpJobRequest): + The request object. The request message for canceling a + DLP job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/dlpJobs/*}:cancel', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_cancel_dlp_job(request, metadata) + pb_request = dlp.CancelDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CreateDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("CreateDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the create deidentify + template method over HTTP. + + Args: + request (~.dlp.CreateDeidentifyTemplateRequest): + The request object. Request message for + CreateDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_deidentify_template(request, metadata) + pb_request = dlp.CreateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deidentify_template(resp) + return resp + + class _CreateDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("CreateDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DlpJob: + r"""Call the create dlp job method over HTTP. + + Args: + request (~.dlp.CreateDlpJobRequest): + The request object. Request message for + CreateDlpJobRequest. Used to initiate + long running jobs such as calculating + risk metrics or inspecting Google Cloud + Storage. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/dlpJobs', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_dlp_job(request, metadata) + pb_request = dlp.CreateDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_dlp_job(resp) + return resp + + class _CreateInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("CreateInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectTemplate: + r"""Call the create inspect template method over HTTP. + + Args: + request (~.dlp.CreateInspectTemplateRequest): + The request object. Request message for + CreateInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/inspectTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/inspectTemplates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_inspect_template(request, metadata) + pb_request = dlp.CreateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_inspect_template(resp) + return resp + + class _CreateJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("CreateJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.JobTrigger: + r"""Call the create job trigger method over HTTP. + + Args: + request (~.dlp.CreateJobTriggerRequest): + The request object. Request message for CreateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/jobTriggers', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_job_trigger(request, metadata) + pb_request = dlp.CreateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job_trigger(resp) + return resp + + class _CreateStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("CreateStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.CreateStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.StoredInfoType: + r"""Call the create stored info type method over HTTP. + + Args: + request (~.dlp.CreateStoredInfoTypeRequest): + The request object. Request message for + CreateStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/storedInfoTypes', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_stored_info_type(request, metadata) + pb_request = dlp.CreateStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_stored_info_type(resp) + return resp + + class _DeidentifyContent(DlpServiceRestStub): + def __hash__(self): + return hash("DeidentifyContent") + + def __call__(self, + request: dlp.DeidentifyContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyContentResponse: + r"""Call the deidentify content method over HTTP. + + Args: + request (~.dlp.DeidentifyContentRequest): + The request object. Request to de-identify a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:deidentify', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:deidentify', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_deidentify_content(request, metadata) + pb_request = dlp.DeidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyContentResponse() + pb_resp = dlp.DeidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_deidentify_content(resp) + return resp + + class _DeleteDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete deidentify + template method over HTTP. + + Args: + request (~.dlp.DeleteDeidentifyTemplateRequest): + The request object. Request message for + DeleteDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_deidentify_template(request, metadata) + pb_request = dlp.DeleteDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete dlp job method over HTTP. + + Args: + request (~.dlp.DeleteDlpJobRequest): + The request object. The request message for deleting a + DLP job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/dlpJobs/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_dlp_job(request, metadata) + pb_request = dlp.DeleteDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete inspect template method over HTTP. + + Args: + request (~.dlp.DeleteInspectTemplateRequest): + The request object. 
Request message for + DeleteInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_inspect_template(request, metadata) + pb_request = dlp.DeleteInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete job trigger method over HTTP. + + Args: + request (~.dlp.DeleteJobTriggerRequest): + The request object. Request message for DeleteJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_job_trigger(request, metadata) + pb_request = dlp.DeleteJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.DeleteStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete stored info type method over HTTP. + + Args: + request (~.dlp.DeleteStoredInfoTypeRequest): + The request object. Request message for + DeleteStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_stored_info_type(request, metadata) + pb_request = dlp.DeleteStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _FinishDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("FinishDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.FinishDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the finish dlp job method over HTTP. + + Args: + request (~.dlp.FinishDlpJobRequest): + The request object. The request message for finishing a + DLP hybrid job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:finish', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_finish_dlp_job(request, metadata) + pb_request = dlp.FinishDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("GetDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the get deidentify template method over HTTP. + + Args: + request (~.dlp.GetDeidentifyTemplateRequest): + The request object. Request message for + GetDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_get_deidentify_template(request, metadata) + pb_request = dlp.GetDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deidentify_template(resp) + return resp + + class _GetDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("GetDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DlpJob: + r"""Call the get dlp job method over HTTP. + + Args: + request (~.dlp.GetDlpJobRequest): + The request object. The request message for [DlpJobs.GetDlpJob][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/dlpJobs/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', + }, + ] + request, metadata = self._interceptor.pre_get_dlp_job(request, metadata) + pb_request = dlp.GetDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_dlp_job(resp) + return resp + + class _GetInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("GetInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectTemplate: + r"""Call the get inspect template method over HTTP. + + Args: + request (~.dlp.GetInspectTemplateRequest): + The request object. Request message for + GetInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + }, + ] + request, metadata = self._interceptor.pre_get_inspect_template(request, metadata) + pb_request = dlp.GetInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_inspect_template(resp) + return resp + + class _GetJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("GetJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.JobTrigger: + r"""Call the get job trigger method over HTTP. + + Args: + request (~.dlp.GetJobTriggerRequest): + The request object. Request message for GetJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_job_trigger(request, metadata) + pb_request = dlp.GetJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_trigger(resp) + return resp + + class _GetStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("GetStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.GetStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.StoredInfoType: + r"""Call the get stored info type method over HTTP. + + Args: + request (~.dlp.GetStoredInfoTypeRequest): + The request object. Request message for + GetStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + }, + ] + request, metadata = self._interceptor.pre_get_stored_info_type(request, metadata) + pb_request = dlp.GetStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stored_info_type(resp) + return resp + + class _HybridInspectDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("HybridInspectDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.HybridInspectDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect dlp job method over HTTP. + + Args: + request (~.dlp.HybridInspectDlpJobRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job(request, metadata) + pb_request = dlp.HybridInspectDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_hybrid_inspect_dlp_job(resp) + return resp + + class _HybridInspectJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("HybridInspectJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.HybridInspectJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect job + trigger method over HTTP. + + Args: + request (~.dlp.HybridInspectJobTriggerRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger(request, metadata) + pb_request = dlp.HybridInspectJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_hybrid_inspect_job_trigger(resp) + return resp + + class _InspectContent(DlpServiceRestStub): + def __hash__(self): + return hash("InspectContent") + + def __call__(self, + request: dlp.InspectContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectContentResponse: + r"""Call the inspect content method over HTTP. + + Args: + request (~.dlp.InspectContentRequest): + The request object. Request to search for potentially + sensitive info in a ContentItem. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectContentResponse: + Results of inspecting an item. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:inspect', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:inspect', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_inspect_content(request, metadata) + pb_request = dlp.InspectContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectContentResponse() + pb_resp = dlp.InspectContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_inspect_content(resp) + return resp + + class _ListDeidentifyTemplates(DlpServiceRestStub): + def __hash__(self): + return hash("ListDeidentifyTemplates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListDeidentifyTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListDeidentifyTemplatesResponse: + r"""Call the list deidentify templates method over HTTP. + + Args: + request (~.dlp.ListDeidentifyTemplatesRequest): + The request object. Request message for + ListDeidentifyTemplates. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListDeidentifyTemplatesResponse: + Response message for + ListDeidentifyTemplates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', + }, + ] + request, metadata = self._interceptor.pre_list_deidentify_templates(request, metadata) + pb_request = dlp.ListDeidentifyTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDeidentifyTemplatesResponse() + pb_resp = dlp.ListDeidentifyTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deidentify_templates(resp) + return resp + + class _ListDlpJobs(DlpServiceRestStub): + def __hash__(self): + return hash("ListDlpJobs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListDlpJobsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListDlpJobsResponse: + r"""Call the list dlp jobs method over HTTP. + + Args: + request (~.dlp.ListDlpJobsRequest): + The request object. The request message for listing DLP + jobs. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListDlpJobsResponse: + The response message for listing DLP + jobs. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/dlpJobs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/dlpJobs', + }, + ] + request, metadata = self._interceptor.pre_list_dlp_jobs(request, metadata) + pb_request = dlp.ListDlpJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDlpJobsResponse() + pb_resp = dlp.ListDlpJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_dlp_jobs(resp) + return resp + + class _ListInfoTypes(DlpServiceRestStub): + def __hash__(self): + return hash("ListInfoTypes") + + def __call__(self, + request: dlp.ListInfoTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListInfoTypesResponse: + r"""Call the list info types method over HTTP. + + Args: + request (~.dlp.ListInfoTypesRequest): + The request object. Request for the list of infoTypes. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInfoTypesResponse: + Response to the ListInfoTypes + request. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/infoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=locations/*}/infoTypes', + }, + ] + request, metadata = self._interceptor.pre_list_info_types(request, metadata) + pb_request = dlp.ListInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInfoTypesResponse() + pb_resp = dlp.ListInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_info_types(resp) + return resp + + class _ListInspectTemplates(DlpServiceRestStub): + def __hash__(self): + return hash("ListInspectTemplates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListInspectTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListInspectTemplatesResponse: + r"""Call the list inspect templates method over HTTP. + + Args: + request (~.dlp.ListInspectTemplatesRequest): + The request object. Request message for + ListInspectTemplates. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInspectTemplatesResponse: + Response message for + ListInspectTemplates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/inspectTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/inspectTemplates', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', + }, + ] + request, metadata = self._interceptor.pre_list_inspect_templates(request, metadata) + pb_request = dlp.ListInspectTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInspectTemplatesResponse() + pb_resp = dlp.ListInspectTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_inspect_templates(resp) + return resp + + class _ListJobTriggers(DlpServiceRestStub): + def __hash__(self): + return hash("ListJobTriggers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListJobTriggersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListJobTriggersResponse: + r"""Call the list job triggers method over HTTP. + + Args: + request (~.dlp.ListJobTriggersRequest): + The request object. Request message for ListJobTriggers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListJobTriggersResponse: + Response message for ListJobTriggers. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/jobTriggers', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', + }, + ] + request, metadata = self._interceptor.pre_list_job_triggers(request, metadata) + pb_request = dlp.ListJobTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListJobTriggersResponse() + pb_resp = dlp.ListJobTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_job_triggers(resp) + return resp + + class _ListStoredInfoTypes(DlpServiceRestStub): + def __hash__(self): + return hash("ListStoredInfoTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ListStoredInfoTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ListStoredInfoTypesResponse: + r"""Call the list stored info types method over HTTP. + + Args: + request (~.dlp.ListStoredInfoTypesRequest): + The request object. Request message for + ListStoredInfoTypes. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListStoredInfoTypesResponse: + Response message for + ListStoredInfoTypes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/storedInfoTypes', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', + }, + ] + request, metadata = self._interceptor.pre_list_stored_info_types(request, metadata) + pb_request = dlp.ListStoredInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListStoredInfoTypesResponse() + pb_resp = dlp.ListStoredInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_stored_info_types(resp) + return resp + + class _RedactImage(DlpServiceRestStub): + def __hash__(self): + return hash("RedactImage") + + def __call__(self, + request: dlp.RedactImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.RedactImageResponse: + r"""Call the redact image method over HTTP. + + Args: + request (~.dlp.RedactImageRequest): + The request object. Request to search for potentially + sensitive info in an image and redact it + by covering it with a colored rectangle. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.RedactImageResponse: + Results of redacting an image. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/image:redact', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/image:redact', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_redact_image(request, metadata) + pb_request = dlp.RedactImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.RedactImageResponse() + pb_resp = dlp.RedactImageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_redact_image(resp) + return resp + + class _ReidentifyContent(DlpServiceRestStub): + def __hash__(self): + return hash("ReidentifyContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.ReidentifyContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.ReidentifyContentResponse: + r"""Call the reidentify content method over HTTP. + + Args: + request (~.dlp.ReidentifyContentRequest): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ReidentifyContentResponse: + Results of re-identifying an item. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:reidentify', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:reidentify', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_reidentify_content(request, metadata) + pb_request = dlp.ReidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ReidentifyContentResponse() + pb_resp = dlp.ReidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reidentify_content(resp) + return resp + + class _UpdateDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the update deidentify + template method over HTTP. + + Args: + request (~.dlp.UpdateDeidentifyTemplateRequest): + The request object. Request message for + UpdateDeidentifyTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_deidentify_template(request, metadata) + pb_request = dlp.UpdateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deidentify_template(resp) + return resp + + class _UpdateInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.InspectTemplate: + r"""Call the update inspect template method over HTTP. + + Args: + request (~.dlp.UpdateInspectTemplateRequest): + The request object. Request message for + UpdateInspectTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_inspect_template(request, metadata) + pb_request = dlp.UpdateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_inspect_template(resp) + return resp + + class _UpdateJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.JobTrigger: + r"""Call the update job trigger method over HTTP. + + Args: + request (~.dlp.UpdateJobTriggerRequest): + The request object. Request message for UpdateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_job_trigger(request, metadata) + pb_request = dlp.UpdateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job_trigger(resp) + return resp + + class _UpdateStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: dlp.UpdateStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> dlp.StoredInfoType: + r"""Call the update stored info type method over HTTP. + + Args: + request (~.dlp.UpdateStoredInfoTypeRequest): + The request object. Request message for + UpdateStoredInfoType. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + 'body': '*', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_stored_info_type(request, metadata) + pb_request = dlp.UpdateStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_stored_info_type(resp) + return resp + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + dlp.DeidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + dlp.InspectContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + dlp.ListDeidentifyTemplatesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + dlp.ListDlpJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + dlp.ListInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + dlp.ListInspectTemplatesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + dlp.ListJobTriggersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + dlp.ListStoredInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + dlp.RedactImageResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + dlp.ReidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'DlpServiceRestTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py new file mode 100644 index 00000000..5bc3d949 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .dlp import ( + Action, + ActivateJobTriggerRequest, + AnalyzeDataSourceRiskDetails, + BoundingBox, + BucketingConfig, + ByteContentItem, + CancelDlpJobRequest, + CharacterMaskConfig, + CharsToIgnore, + Color, + Container, + ContentItem, + ContentLocation, + CreateDeidentifyTemplateRequest, + CreateDlpJobRequest, + CreateInspectTemplateRequest, + CreateJobTriggerRequest, + CreateStoredInfoTypeRequest, + CryptoDeterministicConfig, + CryptoHashConfig, + CryptoKey, + CryptoReplaceFfxFpeConfig, + DataProfileAction, + DataProfileConfigSnapshot, + DataProfileJobConfig, + DataProfileLocation, + DataProfilePubSubCondition, + DataProfilePubSubMessage, + DataRiskLevel, + DateShiftConfig, + DateTime, + DeidentifyConfig, + DeidentifyContentRequest, + DeidentifyContentResponse, + DeidentifyTemplate, + DeleteDeidentifyTemplateRequest, + DeleteDlpJobRequest, + DeleteInspectTemplateRequest, + DeleteJobTriggerRequest, + DeleteStoredInfoTypeRequest, + DlpJob, + DocumentLocation, + Error, + ExcludeByHotword, + ExcludeInfoTypes, + ExclusionRule, + FieldTransformation, + Finding, + FinishDlpJobRequest, + FixedSizeBucketingConfig, + GetDeidentifyTemplateRequest, + GetDlpJobRequest, + GetInspectTemplateRequest, + GetJobTriggerRequest, + GetStoredInfoTypeRequest, + HybridContentItem, + HybridFindingDetails, + HybridInspectDlpJobRequest, + HybridInspectJobTriggerRequest, + HybridInspectResponse, + HybridInspectStatistics, + ImageLocation, + ImageTransformations, + InfoTypeCategory, + InfoTypeDescription, + InfoTypeStats, + InfoTypeSummary, + InfoTypeTransformations, + InspectConfig, + InspectContentRequest, + InspectContentResponse, + InspectDataSourceDetails, + InspectionRule, + InspectionRuleSet, + InspectJobConfig, + InspectResult, + InspectTemplate, + JobTrigger, + KmsWrappedCryptoKey, + LargeCustomDictionaryConfig, + LargeCustomDictionaryStats, + ListDeidentifyTemplatesRequest, + ListDeidentifyTemplatesResponse, + ListDlpJobsRequest, + ListDlpJobsResponse, + 
ListInfoTypesRequest, + ListInfoTypesResponse, + ListInspectTemplatesRequest, + ListInspectTemplatesResponse, + ListJobTriggersRequest, + ListJobTriggersResponse, + ListStoredInfoTypesRequest, + ListStoredInfoTypesResponse, + Location, + Manual, + MetadataLocation, + OtherInfoTypeSummary, + OutputStorageConfig, + PrimitiveTransformation, + PrivacyMetric, + ProfileStatus, + QuasiId, + QuoteInfo, + Range, + RecordCondition, + RecordLocation, + RecordSuppression, + RecordTransformation, + RecordTransformations, + RedactConfig, + RedactImageRequest, + RedactImageResponse, + ReidentifyContentRequest, + ReidentifyContentResponse, + ReplaceDictionaryConfig, + ReplaceValueConfig, + ReplaceWithInfoTypeConfig, + RiskAnalysisJobConfig, + Schedule, + StatisticalTable, + StorageMetadataLabel, + StoredInfoType, + StoredInfoTypeConfig, + StoredInfoTypeStats, + StoredInfoTypeVersion, + Table, + TableDataProfile, + TableLocation, + TimePartConfig, + TransformationConfig, + TransformationDescription, + TransformationDetails, + TransformationDetailsStorageConfig, + TransformationErrorHandling, + TransformationLocation, + TransformationOverview, + TransformationResultStatus, + TransformationSummary, + TransientCryptoKey, + UnwrappedCryptoKey, + UpdateDeidentifyTemplateRequest, + UpdateInspectTemplateRequest, + UpdateJobTriggerRequest, + UpdateStoredInfoTypeRequest, + Value, + ValueFrequency, + VersionDescription, + ContentOption, + DlpJobType, + EncryptionStatus, + InfoTypeSupportedBy, + MatchingType, + MetadataType, + RelationalOperator, + ResourceVisibility, + StoredInfoTypeState, + TransformationContainerType, + TransformationResultStatusType, + TransformationType, +) +from .storage import ( + BigQueryField, + BigQueryKey, + BigQueryOptions, + BigQueryTable, + CloudStorageFileSet, + CloudStorageOptions, + CloudStoragePath, + CloudStorageRegexFileSet, + CustomInfoType, + DatastoreKey, + DatastoreOptions, + EntityId, + FieldId, + HybridOptions, + InfoType, + Key, + KindExpression, + 
PartitionId, + RecordKey, + SensitivityScore, + StorageConfig, + StoredType, + TableOptions, + FileType, + Likelihood, +) + +__all__ = ( + 'Action', + 'ActivateJobTriggerRequest', + 'AnalyzeDataSourceRiskDetails', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'Color', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateDeidentifyTemplateRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DataProfileAction', + 'DataProfileConfigSnapshot', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + 'DataRiskLevel', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyTemplate', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDlpJobRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeByHotword', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetDeidentifyTemplateRequest', + 'GetDlpJobRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetStoredInfoTypeRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'ImageTransformations', + 'InfoTypeCategory', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeSummary', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 
'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OtherInfoTypeSummary', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'ProfileStatus', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformation', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'ReplaceDictionaryConfig', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableDataProfile', + 'TableLocation', + 'TimePartConfig', + 'TransformationConfig', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationDetailsStorageConfig', + 'TransformationErrorHandling', + 'TransformationLocation', + 'TransformationOverview', + 'TransformationResultStatus', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateDeidentifyTemplateRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 'VersionDescription', + 'ContentOption', + 'DlpJobType', + 'EncryptionStatus', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'RelationalOperator', + 'ResourceVisibility', + 'StoredInfoTypeState', 
+ 'TransformationContainerType', + 'TransformationResultStatusType', + 'TransformationType', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'SensitivityScore', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py new file mode 100644 index 00000000..926b57bc --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py @@ -0,0 +1,8848 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dlp_v2.types import storage +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'TransformationResultStatusType', + 'TransformationContainerType', + 'TransformationType', + 'RelationalOperator', + 'MatchingType', + 'ContentOption', + 'MetadataType', + 'InfoTypeSupportedBy', + 'DlpJobType', + 'StoredInfoTypeState', + 'ResourceVisibility', + 'EncryptionStatus', + 'ExcludeInfoTypes', + 'ExcludeByHotword', + 'ExclusionRule', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectConfig', + 'ByteContentItem', + 'ContentItem', + 'Table', + 'InspectResult', + 'Finding', + 'Location', + 'ContentLocation', + 'MetadataLocation', + 'StorageMetadataLabel', + 'DocumentLocation', + 'RecordLocation', + 'TableLocation', + 'Container', + 'Range', + 'ImageLocation', + 'BoundingBox', + 'RedactImageRequest', + 'Color', + 'RedactImageResponse', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'InspectContentRequest', + 'InspectContentResponse', + 'OutputStorageConfig', + 'InfoTypeStats', + 'InspectDataSourceDetails', + 'HybridInspectStatistics', + 'InfoTypeDescription', + 'InfoTypeCategory', + 'VersionDescription', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'RiskAnalysisJobConfig', + 'QuasiId', + 'StatisticalTable', + 'PrivacyMetric', + 'AnalyzeDataSourceRiskDetails', + 'ValueFrequency', + 'Value', + 
'QuoteInfo', + 'DateTime', + 'DeidentifyConfig', + 'ImageTransformations', + 'TransformationErrorHandling', + 'PrimitiveTransformation', + 'TimePartConfig', + 'CryptoHashConfig', + 'CryptoDeterministicConfig', + 'ReplaceValueConfig', + 'ReplaceDictionaryConfig', + 'ReplaceWithInfoTypeConfig', + 'RedactConfig', + 'CharsToIgnore', + 'CharacterMaskConfig', + 'FixedSizeBucketingConfig', + 'BucketingConfig', + 'CryptoReplaceFfxFpeConfig', + 'CryptoKey', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'KmsWrappedCryptoKey', + 'DateShiftConfig', + 'InfoTypeTransformations', + 'FieldTransformation', + 'RecordTransformations', + 'RecordSuppression', + 'RecordCondition', + 'TransformationOverview', + 'TransformationSummary', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationLocation', + 'RecordTransformation', + 'TransformationResultStatus', + 'TransformationDetailsStorageConfig', + 'Schedule', + 'Manual', + 'InspectTemplate', + 'DeidentifyTemplate', + 'Error', + 'JobTrigger', + 'Action', + 'TransformationConfig', + 'CreateInspectTemplateRequest', + 'UpdateInspectTemplateRequest', + 'GetInspectTemplateRequest', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'DeleteInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'ActivateJobTriggerRequest', + 'UpdateJobTriggerRequest', + 'GetJobTriggerRequest', + 'CreateDlpJobRequest', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'DeleteJobTriggerRequest', + 'InspectJobConfig', + 'DataProfileAction', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DlpJob', + 'GetDlpJobRequest', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'CancelDlpJobRequest', + 'FinishDlpJobRequest', + 'DeleteDlpJobRequest', + 'CreateDeidentifyTemplateRequest', + 'UpdateDeidentifyTemplateRequest', + 'GetDeidentifyTemplateRequest', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'DeleteDeidentifyTemplateRequest', + 'LargeCustomDictionaryConfig', + 
'LargeCustomDictionaryStats', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'StoredInfoType', + 'CreateStoredInfoTypeRequest', + 'UpdateStoredInfoTypeRequest', + 'GetStoredInfoTypeRequest', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'DeleteStoredInfoTypeRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectDlpJobRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectResponse', + 'DataRiskLevel', + 'DataProfileConfigSnapshot', + 'TableDataProfile', + 'ProfileStatus', + 'InfoTypeSummary', + 'OtherInfoTypeSummary', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + }, +) + + +class TransformationResultStatusType(proto.Enum): + r"""Enum of possible outcomes of transformations. SUCCESS if + transformation and storing of transformation was successful, + otherwise, reason for not transforming. + + Values: + STATE_TYPE_UNSPECIFIED (0): + No description available. + INVALID_TRANSFORM (1): + This will be set when a finding could not be + transformed (i.e. outside user set bucket + range). + BIGQUERY_MAX_ROW_SIZE_EXCEEDED (2): + This will be set when a BigQuery + transformation was successful but could not be + stored back in BigQuery because the transformed + row exceeds BigQuery's max row size. + METADATA_UNRETRIEVABLE (3): + This will be set when there is a finding in + the custom metadata of a file, but at the write + time of the transformed file, this key / value + pair is unretrievable. + SUCCESS (4): + This will be set when the transformation and + storing of it is successful. + """ + STATE_TYPE_UNSPECIFIED = 0 + INVALID_TRANSFORM = 1 + BIGQUERY_MAX_ROW_SIZE_EXCEEDED = 2 + METADATA_UNRETRIEVABLE = 3 + SUCCESS = 4 + + +class TransformationContainerType(proto.Enum): + r"""Describes functionality of a given container in its original + format. + + Values: + TRANSFORM_UNKNOWN_CONTAINER (0): + No description available. + TRANSFORM_BODY (1): + No description available. 
+ TRANSFORM_METADATA (2): + No description available. + TRANSFORM_TABLE (3): + No description available. + """ + TRANSFORM_UNKNOWN_CONTAINER = 0 + TRANSFORM_BODY = 1 + TRANSFORM_METADATA = 2 + TRANSFORM_TABLE = 3 + + +class TransformationType(proto.Enum): + r"""An enum of rules that can be used to transform a value. Can be a + record suppression, or one of the transformation rules specified + under ``PrimitiveTransformation``. + + Values: + TRANSFORMATION_TYPE_UNSPECIFIED (0): + Unused + RECORD_SUPPRESSION (1): + Record suppression + REPLACE_VALUE (2): + Replace value + REPLACE_DICTIONARY (15): + Replace value using a dictionary. + REDACT (3): + Redact + CHARACTER_MASK (4): + Character mask + CRYPTO_REPLACE_FFX_FPE (5): + FFX-FPE + FIXED_SIZE_BUCKETING (6): + Fixed size bucketing + BUCKETING (7): + Bucketing + REPLACE_WITH_INFO_TYPE (8): + Replace with info type + TIME_PART (9): + Time part + CRYPTO_HASH (10): + Crypto hash + DATE_SHIFT (12): + Date shift + CRYPTO_DETERMINISTIC_CONFIG (13): + Deterministic crypto + REDACT_IMAGE (14): + Redact image + """ + TRANSFORMATION_TYPE_UNSPECIFIED = 0 + RECORD_SUPPRESSION = 1 + REPLACE_VALUE = 2 + REPLACE_DICTIONARY = 15 + REDACT = 3 + CHARACTER_MASK = 4 + CRYPTO_REPLACE_FFX_FPE = 5 + FIXED_SIZE_BUCKETING = 6 + BUCKETING = 7 + REPLACE_WITH_INFO_TYPE = 8 + TIME_PART = 9 + CRYPTO_HASH = 10 + DATE_SHIFT = 12 + CRYPTO_DETERMINISTIC_CONFIG = 13 + REDACT_IMAGE = 14 + + +class RelationalOperator(proto.Enum): + r"""Operators available for comparing the value of fields. + + Values: + RELATIONAL_OPERATOR_UNSPECIFIED (0): + Unused + EQUAL_TO (1): + Equal. Attempts to match even with + incompatible types. + NOT_EQUAL_TO (2): + Not equal to. Attempts to match even with + incompatible types. + GREATER_THAN (3): + Greater than. + LESS_THAN (4): + Less than. + GREATER_THAN_OR_EQUALS (5): + Greater than or equals. + LESS_THAN_OR_EQUALS (6): + Less than or equals. 
+ EXISTS (7): + Exists + """ + RELATIONAL_OPERATOR_UNSPECIFIED = 0 + EQUAL_TO = 1 + NOT_EQUAL_TO = 2 + GREATER_THAN = 3 + LESS_THAN = 4 + GREATER_THAN_OR_EQUALS = 5 + LESS_THAN_OR_EQUALS = 6 + EXISTS = 7 + + +class MatchingType(proto.Enum): + r"""Type of the match which can be applied to different ways of + matching, like Dictionary, regular expression and intersecting + with findings of another info type. + + Values: + MATCHING_TYPE_UNSPECIFIED (0): + Invalid. + MATCHING_TYPE_FULL_MATCH (1): + Full match. + - Dictionary: join of Dictionary results matched + complete finding quote - Regex: all regex + matches fill a finding quote start to end - + Exclude info type: completely inside affecting + info types findings + MATCHING_TYPE_PARTIAL_MATCH (2): + Partial match. + - Dictionary: at least one of the tokens in the + finding matches - Regex: substring of the + finding matches + - Exclude info type: intersects with affecting + info types findings + MATCHING_TYPE_INVERSE_MATCH (3): + Inverse match. + - Dictionary: no tokens in the finding match the + dictionary - Regex: finding doesn't match the + regex + - Exclude info type: no intersection with + affecting info types findings + """ + MATCHING_TYPE_UNSPECIFIED = 0 + MATCHING_TYPE_FULL_MATCH = 1 + MATCHING_TYPE_PARTIAL_MATCH = 2 + MATCHING_TYPE_INVERSE_MATCH = 3 + + +class ContentOption(proto.Enum): + r"""Deprecated and unused. + + Values: + CONTENT_UNSPECIFIED (0): + Includes entire content of a file or a data + stream. + CONTENT_TEXT (1): + Text content within the data, excluding any + metadata. + CONTENT_IMAGE (2): + Images found in the data. + """ + CONTENT_UNSPECIFIED = 0 + CONTENT_TEXT = 1 + CONTENT_IMAGE = 2 + + +class MetadataType(proto.Enum): + r"""Type of metadata containing the finding. + + Values: + METADATATYPE_UNSPECIFIED (0): + Unused + STORAGE_METADATA (2): + General file metadata provided by Cloud + Storage. 
+ """ + METADATATYPE_UNSPECIFIED = 0 + STORAGE_METADATA = 2 + + +class InfoTypeSupportedBy(proto.Enum): + r"""Parts of the APIs which use certain infoTypes. + + Values: + ENUM_TYPE_UNSPECIFIED (0): + Unused. + INSPECT (1): + Supported by the inspect operations. + RISK_ANALYSIS (2): + Supported by the risk analysis operations. + """ + ENUM_TYPE_UNSPECIFIED = 0 + INSPECT = 1 + RISK_ANALYSIS = 2 + + +class DlpJobType(proto.Enum): + r"""An enum to represent the various types of DLP jobs. + + Values: + DLP_JOB_TYPE_UNSPECIFIED (0): + Defaults to INSPECT_JOB. + INSPECT_JOB (1): + The job inspected Google Cloud for sensitive + data. + RISK_ANALYSIS_JOB (2): + The job executed a Risk Analysis computation. + """ + DLP_JOB_TYPE_UNSPECIFIED = 0 + INSPECT_JOB = 1 + RISK_ANALYSIS_JOB = 2 + + +class StoredInfoTypeState(proto.Enum): + r"""State of a StoredInfoType version. + + Values: + STORED_INFO_TYPE_STATE_UNSPECIFIED (0): + Unused + PENDING (1): + StoredInfoType version is being created. + READY (2): + StoredInfoType version is ready for use. + FAILED (3): + StoredInfoType creation failed. All relevant error messages + are returned in the ``StoredInfoTypeVersion`` message. + INVALID (4): + StoredInfoType is no longer valid because artifacts stored + in user-controlled storage were modified. To fix an invalid + StoredInfoType, use the ``UpdateStoredInfoType`` method to + create a new version. + """ + STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 + PENDING = 1 + READY = 2 + FAILED = 3 + INVALID = 4 + + +class ResourceVisibility(proto.Enum): + r"""How broadly a resource has been shared. New items may be + added over time. A higher number means more restricted. + + Values: + RESOURCE_VISIBILITY_UNSPECIFIED (0): + Unused. + RESOURCE_VISIBILITY_PUBLIC (10): + Visible to any user. + RESOURCE_VISIBILITY_RESTRICTED (20): + Visible only to specific users. 
+ """ + RESOURCE_VISIBILITY_UNSPECIFIED = 0 + RESOURCE_VISIBILITY_PUBLIC = 10 + RESOURCE_VISIBILITY_RESTRICTED = 20 + + +class EncryptionStatus(proto.Enum): + r"""How a resource is encrypted. + + Values: + ENCRYPTION_STATUS_UNSPECIFIED (0): + Unused. + ENCRYPTION_GOOGLE_MANAGED (1): + Google manages server-side encryption keys on + your behalf. + ENCRYPTION_CUSTOMER_MANAGED (2): + Customer provides the key. + """ + ENCRYPTION_STATUS_UNSPECIFIED = 0 + ENCRYPTION_GOOGLE_MANAGED = 1 + ENCRYPTION_CUSTOMER_MANAGED = 2 + + +class ExcludeInfoTypes(proto.Message): + r"""List of excluded infoTypes. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoType list in ExclusionRule rule drops a finding when it + overlaps or contained within with a finding of an infoType + from this list. For example, for + ``InspectionRuleSet.info_types`` containing + "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` + with "EMAIL_ADDRESS" the phone number findings are dropped + if they overlap with EMAIL_ADDRESS finding. That leads to + "555-222-2222@example.org" to generate only a single + finding, namely email address. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + + +class ExcludeByHotword(proto.Message): + r"""The rule to exclude findings based on a hotword. For record + inspection of tables, column names are considered hotwords. An + example of this is to exclude a finding if a BigQuery column + matches a specific pattern. + + Attributes: + hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): + Range of characters within which the entire + hotword must reside. The total length of the + window cannot exceed 1000 characters. 
The + windowBefore property in proximity should be set + to 1 if the hotword needs to be included in a + column header. + """ + + hotword_regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CustomInfoType.Regex, + ) + proximity: storage.CustomInfoType.DetectionRule.Proximity = proto.Field( + proto.MESSAGE, + number=2, + message=storage.CustomInfoType.DetectionRule.Proximity, + ) + + +class ExclusionRule(proto.Message): + r"""The rule that specifies conditions when findings of infoTypes + specified in ``InspectionRuleSet`` are removed from results. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Dictionary which defines the rule. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression which defines the rule. + + This field is a member of `oneof`_ ``type``. + exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes): + Set of infoTypes for which findings would + affect this rule. + + This field is a member of `oneof`_ ``type``. + exclude_by_hotword (google.cloud.dlp_v2.types.ExcludeByHotword): + Drop if the hotword rule is contained in the + proximate context. For tabular data, the context + includes the column name. + + This field is a member of `oneof`_ ``type``. + matching_type (google.cloud.dlp_v2.types.MatchingType): + How the rule is applied, see MatchingType + documentation for details. 
+ """ + + dictionary: storage.CustomInfoType.Dictionary = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + exclude_info_types: 'ExcludeInfoTypes' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='ExcludeInfoTypes', + ) + exclude_by_hotword: 'ExcludeByHotword' = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message='ExcludeByHotword', + ) + matching_type: 'MatchingType' = proto.Field( + proto.ENUM, + number=4, + enum='MatchingType', + ) + + +class InspectionRule(proto.Message): + r"""A single inspection rule to be applied to infoTypes, specified in + ``InspectionRuleSet``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + + This field is a member of `oneof`_ ``type``. + exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): + Exclusion rule. + + This field is a member of `oneof`_ ``type``. + """ + + hotword_rule: storage.CustomInfoType.DetectionRule.HotwordRule = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.DetectionRule.HotwordRule, + ) + exclusion_rule: 'ExclusionRule' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='ExclusionRule', + ) + + +class InspectionRuleSet(proto.Message): + r"""Rule set for modifying a set of infoTypes to alter behavior + under certain circumstances, depending on the specific details + of the rules within the set. 
+ + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + List of infoTypes this rule set is applied + to. + rules (MutableSequence[google.cloud.dlp_v2.types.InspectionRule]): + Set of rules to be applied to infoTypes. The + rules are applied in order. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + rules: MutableSequence['InspectionRule'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='InspectionRule', + ) + + +class InspectConfig(proto.Message): + r"""Configuration description of the scanning process. When used with + redactContent only info_types and min_likelihood are currently used. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + Restricts what info_types to look for. The values must + correspond to InfoType values returned by ListInfoTypes or + listed at + https://cloud.google.com/dlp/docs/infotypes-reference. + + When no InfoTypes or CustomInfoTypes are specified in a + request, the system may automatically choose what detectors + to run. By default this may be all types, but may change + over time as detectors are updated. + + If you need precise control and predictability as to what + detectors are run you should specify specific InfoTypes + listed in the reference, otherwise a default list will be + used, which may change over time. + min_likelihood (google.cloud.dlp_v2.types.Likelihood): + Only returns findings equal or above this + threshold. The default is POSSIBLE. + See https://cloud.google.com/dlp/docs/likelihood + to learn more. + limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): + Configuration to control the number of findings returned. + This is not used for data profiling. + + When redacting sensitive data from images, finding limits + don't apply. They can cause unexpected or inconsistent + results, where only some data is redacted. 
Don't include + finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + include_quote (bool): + When true, a contextual quote from the data that triggered a + finding is included in the response; see + [Finding.quote][google.privacy.dlp.v2.Finding.quote]. This + is not used for data profiling. + exclude_info_types (bool): + When true, excludes type information of the + findings. This is not used for data profiling. + custom_info_types (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType]): + CustomInfoTypes provided by the user. See + https://cloud.google.com/dlp/docs/creating-custom-infotypes + to learn more. + content_options (MutableSequence[google.cloud.dlp_v2.types.ContentOption]): + Deprecated and unused. + rule_set (MutableSequence[google.cloud.dlp_v2.types.InspectionRuleSet]): + Set of rules to apply to the findings for + this InspectConfig. Exclusion rules, contained + in the set are executed in the end, other rules + are executed in the order they are specified for + each info type. + """ + + class FindingLimits(proto.Message): + r"""Configuration to control the number of findings returned for + inspection. This is not used for de-identification or data + profiling. + + When redacting sensitive data from images, finding limits don't + apply. They can cause unexpected or inconsistent results, where only + some data is redacted. Don't include finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + + Attributes: + max_findings_per_item (int): + Max number of findings that will be returned for each item + scanned. When set within ``InspectJobConfig``, the maximum + returned is 2000 regardless if this is set higher. When set + within ``InspectContentRequest``, this field is ignored. + max_findings_per_request (int): + Max number of findings that will be returned per + request/job. 
When set within ``InspectContentRequest``, the + maximum returned is 2000 regardless if this is set higher. + max_findings_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): + Configuration of findings limit given for + specified infoTypes. + """ + + class InfoTypeLimit(proto.Message): + r"""Max findings configuration per infoType, per content item or + long running DlpJob. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Type of information the findings limit applies to. Only one + limit per info_type should be provided. If InfoTypeLimit + does not have an info_type, the DLP API applies the limit + against all info_types that are found but not specified in + another InfoTypeLimit. + max_findings (int): + Max findings limit for the given infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + max_findings: int = proto.Field( + proto.INT32, + number=2, + ) + + max_findings_per_item: int = proto.Field( + proto.INT32, + number=1, + ) + max_findings_per_request: int = proto.Field( + proto.INT32, + number=2, + ) + max_findings_per_info_type: MutableSequence['InspectConfig.FindingLimits.InfoTypeLimit'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InspectConfig.FindingLimits.InfoTypeLimit', + ) + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + min_likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=2, + enum=storage.Likelihood, + ) + limits: FindingLimits = proto.Field( + proto.MESSAGE, + number=3, + message=FindingLimits, + ) + include_quote: bool = proto.Field( + proto.BOOL, + number=4, + ) + exclude_info_types: bool = proto.Field( + proto.BOOL, + number=5, + ) + custom_info_types: MutableSequence[storage.CustomInfoType] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=storage.CustomInfoType, + ) + 
content_options: MutableSequence['ContentOption'] = proto.RepeatedField( + proto.ENUM, + number=8, + enum='ContentOption', + ) + rule_set: MutableSequence['InspectionRuleSet'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='InspectionRuleSet', + ) + + +class ByteContentItem(proto.Message): + r"""Container for bytes to inspect or redact. + + Attributes: + type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): + The type of data stored in the bytes string. Default will be + TEXT_UTF8. + data (bytes): + Content data to inspect or redact. + """ + class BytesType(proto.Enum): + r"""The type of data being sent for inspection. To learn more, see + `Supported file + types `__. + + Values: + BYTES_TYPE_UNSPECIFIED (0): + Unused + IMAGE (6): + Any image type. + IMAGE_JPEG (1): + jpeg + IMAGE_BMP (2): + bmp + IMAGE_PNG (3): + png + IMAGE_SVG (4): + svg + TEXT_UTF8 (5): + plain text + WORD_DOCUMENT (7): + docx, docm, dotx, dotm + PDF (8): + pdf + POWERPOINT_DOCUMENT (9): + pptx, pptm, potx, potm, pot + EXCEL_DOCUMENT (10): + xlsx, xlsm, xltx, xltm + AVRO (11): + avro + CSV (12): + csv + TSV (13): + tsv + """ + BYTES_TYPE_UNSPECIFIED = 0 + IMAGE = 6 + IMAGE_JPEG = 1 + IMAGE_BMP = 2 + IMAGE_PNG = 3 + IMAGE_SVG = 4 + TEXT_UTF8 = 5 + WORD_DOCUMENT = 7 + PDF = 8 + POWERPOINT_DOCUMENT = 9 + EXCEL_DOCUMENT = 10 + AVRO = 11 + CSV = 12 + TSV = 13 + + type_: BytesType = proto.Field( + proto.ENUM, + number=1, + enum=BytesType, + ) + data: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class ContentItem(proto.Message): + r""" + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + String data to inspect or redact. 
+ + This field is a member of `oneof`_ ``data_item``. + table (google.cloud.dlp_v2.types.Table): + Structured content for inspection. See + https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table + to learn more. + + This field is a member of `oneof`_ ``data_item``. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + Content data to inspect or redact. Replaces ``type`` and + ``data``. + + This field is a member of `oneof`_ ``data_item``. + """ + + value: str = proto.Field( + proto.STRING, + number=3, + oneof='data_item', + ) + table: 'Table' = proto.Field( + proto.MESSAGE, + number=4, + oneof='data_item', + message='Table', + ) + byte_item: 'ByteContentItem' = proto.Field( + proto.MESSAGE, + number=5, + oneof='data_item', + message='ByteContentItem', + ) + + +class Table(proto.Message): + r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request + allowed. See + https://cloud.google.com/dlp/docs/inspecting-structured-text#inspecting_a_table + to learn more. + + Attributes: + headers (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Headers of the table. + rows (MutableSequence[google.cloud.dlp_v2.types.Table.Row]): + Rows of the table. + """ + + class Row(proto.Message): + r"""Values of the row. + + Attributes: + values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Individual cells. + """ + + values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + + headers: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + rows: MutableSequence[Row] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Row, + ) + + +class InspectResult(proto.Message): + r"""All the findings for a single scanned item. + + Attributes: + findings (MutableSequence[google.cloud.dlp_v2.types.Finding]): + List of findings for an item. 
+ findings_truncated (bool): + If true, then this item might have more + findings than were returned, and the findings + returned are an arbitrary subset of all + findings. The findings list might be truncated + because the input items were too large, or + because the server reached the maximum amount of + resources allowed for a single API call. For + best results, divide the input into smaller + batches. + """ + + findings: MutableSequence['Finding'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Finding', + ) + findings_truncated: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class Finding(proto.Message): + r"""Represents a piece of potentially sensitive content. + + Attributes: + name (str): + Resource name in format + projects/{project}/locations/{location}/findings/{finding} + Populated only when viewing persisted findings. + quote (str): + The content that was found. Even if the content is not + textual, it may be converted to a textual representation + here. Provided if ``include_quote`` is true and the finding + is less than or equal to 4096 bytes long. If the finding + exceeds 4096 bytes in length, the quote may be omitted. + info_type (google.cloud.dlp_v2.types.InfoType): + The type of content that might have been found. Provided if + ``excluded_types`` is false. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Confidence of how likely it is that the ``info_type`` is + correct. + location (google.cloud.dlp_v2.types.Location): + Where the content was found. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when finding was detected. + quote_info (google.cloud.dlp_v2.types.QuoteInfo): + Contains data parsed from quotes. Only populated if + include_quote was set to true and a supported infoType was + requested. Currently supported infoTypes: DATE, + DATE_OF_BIRTH and TIME. + resource_name (str): + The job that stored the finding. + trigger_name (str): + Job trigger name, if applicable, for this + finding. 
+ labels (MutableMapping[str, str]): + The labels associated with this ``Finding``. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + job_create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the job started that produced this + finding. + job_name (str): + The job that stored the finding. + finding_id (str): + The unique finding id. + """ + + name: str = proto.Field( + proto.STRING, + number=14, + ) + quote: str = proto.Field( + proto.STRING, + number=1, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=3, + enum=storage.Likelihood, + ) + location: 'Location' = proto.Field( + proto.MESSAGE, + number=4, + message='Location', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + quote_info: 'QuoteInfo' = proto.Field( + proto.MESSAGE, + number=7, + message='QuoteInfo', + ) + resource_name: str = proto.Field( + proto.STRING, + number=8, + ) + trigger_name: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + job_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + job_name: str = proto.Field( + proto.STRING, + number=13, + ) + finding_id: str = proto.Field( + proto.STRING, + number=15, + ) + + +class Location(proto.Message): + r"""Specifies the location of the finding. 
+ + Attributes: + byte_range (google.cloud.dlp_v2.types.Range): + Zero-based byte offsets delimiting the + finding. These are relative to the finding's + containing element. Note that when the content + is not textual, this references the UTF-8 + encoded textual representation of the content. + Omitted if content is an image. + codepoint_range (google.cloud.dlp_v2.types.Range): + Unicode character offsets delimiting the + finding. These are relative to the finding's + containing element. Provided when the content is + text. + content_locations (MutableSequence[google.cloud.dlp_v2.types.ContentLocation]): + List of nested objects pointing to the + precise location of the finding within the file + or record. + container (google.cloud.dlp_v2.types.Container): + Information about the container where this + finding occurred, if available. + """ + + byte_range: 'Range' = proto.Field( + proto.MESSAGE, + number=1, + message='Range', + ) + codepoint_range: 'Range' = proto.Field( + proto.MESSAGE, + number=2, + message='Range', + ) + content_locations: MutableSequence['ContentLocation'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ContentLocation', + ) + container: 'Container' = proto.Field( + proto.MESSAGE, + number=8, + message='Container', + ) + + +class ContentLocation(proto.Message): + r"""Precise location of the finding within a document, record, + image, or metadata container. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + container_name (str): + Name of the container where the finding is located. The top + level name is the source file name or table name. 
Names of + some common storage containers are formatted as follows: + + - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` + - Cloud Storage files: ``gs://{bucket}/{path}`` + - Datastore namespace: {namespace} + + Nested names could be absent if the embedded object has no + string identifier (for example, an image contained within a + document). + record_location (google.cloud.dlp_v2.types.RecordLocation): + Location within a row or record of a database + table. + + This field is a member of `oneof`_ ``location``. + image_location (google.cloud.dlp_v2.types.ImageLocation): + Location within an image's pixels. + + This field is a member of `oneof`_ ``location``. + document_location (google.cloud.dlp_v2.types.DocumentLocation): + Location data for document files. + + This field is a member of `oneof`_ ``location``. + metadata_location (google.cloud.dlp_v2.types.MetadataLocation): + Location within the metadata for inspected + content. + + This field is a member of `oneof`_ ``location``. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Finding container modification timestamp, if applicable. For + Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + container_version (str): + Finding container version, if available + ("generation" for Cloud Storage). 
+ """ + + container_name: str = proto.Field( + proto.STRING, + number=1, + ) + record_location: 'RecordLocation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location', + message='RecordLocation', + ) + image_location: 'ImageLocation' = proto.Field( + proto.MESSAGE, + number=3, + oneof='location', + message='ImageLocation', + ) + document_location: 'DocumentLocation' = proto.Field( + proto.MESSAGE, + number=5, + oneof='location', + message='DocumentLocation', + ) + metadata_location: 'MetadataLocation' = proto.Field( + proto.MESSAGE, + number=8, + oneof='location', + message='MetadataLocation', + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class MetadataLocation(proto.Message): + r"""Metadata Location + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.dlp_v2.types.MetadataType): + Type of metadata containing the finding. + storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): + Storage metadata. + + This field is a member of `oneof`_ ``label``. + """ + + type_: 'MetadataType' = proto.Field( + proto.ENUM, + number=1, + enum='MetadataType', + ) + storage_label: 'StorageMetadataLabel' = proto.Field( + proto.MESSAGE, + number=3, + oneof='label', + message='StorageMetadataLabel', + ) + + +class StorageMetadataLabel(proto.Message): + r"""Storage metadata label to indicate which metadata entry + contains findings. + + Attributes: + key (str): + + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DocumentLocation(proto.Message): + r"""Location of a finding within a document. + + Attributes: + file_offset (int): + Offset of the line, from the beginning of the + file, where the finding is located. 
+ """ + + file_offset: int = proto.Field( + proto.INT64, + number=1, + ) + + +class RecordLocation(proto.Message): + r"""Location of a finding within a row or record. + + Attributes: + record_key (google.cloud.dlp_v2.types.RecordKey): + Key of the finding. + field_id (google.cloud.dlp_v2.types.FieldId): + Field id of the field containing the finding. + table_location (google.cloud.dlp_v2.types.TableLocation): + Location within a ``ContentItem.Table``. + """ + + record_key: storage.RecordKey = proto.Field( + proto.MESSAGE, + number=1, + message=storage.RecordKey, + ) + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + table_location: 'TableLocation' = proto.Field( + proto.MESSAGE, + number=3, + message='TableLocation', + ) + + +class TableLocation(proto.Message): + r"""Location of a finding within a table. + + Attributes: + row_index (int): + The zero-based index of the row where the finding is + located. Only populated for resources that have a natural + ordering, not BigQuery. In BigQuery, to identify the row a + finding came from, populate + BigQueryOptions.identifying_fields with your primary key + column names and when you store the findings the value of + those columns will be stored inside of Finding. + """ + + row_index: int = proto.Field( + proto.INT64, + number=1, + ) + + +class Container(proto.Message): + r"""Represents a container that may contain DLP findings. + Examples of a container include a file, table, or database + record. + + Attributes: + type_ (str): + Container type, for example BigQuery or Cloud + Storage. + project_id (str): + Project where the finding was found. + Can be different from the project that owns the + finding. + full_path (str): + A string representation of the full container + name. Examples: + - BigQuery: 'Project:DataSetId.TableId' + - Cloud Storage: + 'gs://Bucket/folders/filename.txt' + root_path (str): + The root of the container. 
Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the root is ``dataset_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the root is + ``gs://bucket`` + relative_path (str): + The rest of the path after the root. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the relative path is ``table_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the relative path is + ``folder/filename.txt`` + update_time (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if applicable. + For Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + version (str): + Findings container version, if available + ("generation" for Cloud Storage). + """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + full_path: str = proto.Field( + proto.STRING, + number=3, + ) + root_path: str = proto.Field( + proto.STRING, + number=4, + ) + relative_path: str = proto.Field( + proto.STRING, + number=5, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class Range(proto.Message): + r"""Generic half-open interval [start, end) + + Attributes: + start (int): + Index of the first character of the range + (inclusive). + end (int): + Index of the last character of the range + (exclusive). + """ + + start: int = proto.Field( + proto.INT64, + number=1, + ) + end: int = proto.Field( + proto.INT64, + number=2, + ) + + +class ImageLocation(proto.Message): + r"""Location of the finding within an image. 
+ + Attributes: + bounding_boxes (MutableSequence[google.cloud.dlp_v2.types.BoundingBox]): + Bounding boxes locating the pixels within the + image containing the finding. + """ + + bounding_boxes: MutableSequence['BoundingBox'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BoundingBox', + ) + + +class BoundingBox(proto.Message): + r"""Bounding box encompassing detected text within an image. + + Attributes: + top (int): + Top coordinate of the bounding box. (0,0) is + upper left. + left (int): + Left coordinate of the bounding box. (0,0) is + upper left. + width (int): + Width of the bounding box in pixels. + height (int): + Height of the bounding box in pixels. + """ + + top: int = proto.Field( + proto.INT32, + number=1, + ) + left: int = proto.Field( + proto.INT32, + number=2, + ) + width: int = proto.Field( + proto.INT32, + number=3, + ) + height: int = proto.Field( + proto.INT32, + number=4, + ) + + +class RedactImageRequest(proto.Message): + r"""Request to search for potentially sensitive info in an image + and redact it by covering it with a colored rectangle. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + location_id (str): + Deprecated. This field has no effect. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. 
+ image_redaction_configs (MutableSequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): + The configuration for specifying what content + to redact from images. + include_findings (bool): + Whether the response should include findings + along with the redacted image. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + The content must be PNG, JPEG, SVG or BMP. + """ + + class ImageRedactionConfig(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Only one per info_type should be provided per request. If + not specified, and redact_all_text is false, the DLP API + will redact all text that it matches against all info_types + that are found, but not specified in another + ImageRedactionConfig. + + This field is a member of `oneof`_ ``target``. + redact_all_text (bool): + If true, all text found in the image, regardless whether it + matches an info_type, is redacted. Only one should be + provided. + + This field is a member of `oneof`_ ``target``. + redaction_color (google.cloud.dlp_v2.types.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. 
+ """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + oneof='target', + message=storage.InfoType, + ) + redact_all_text: bool = proto.Field( + proto.BOOL, + number=2, + oneof='target', + ) + redaction_color: 'Color' = proto.Field( + proto.MESSAGE, + number=3, + message='Color', + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + location_id: str = proto.Field( + proto.STRING, + number=8, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + image_redaction_configs: MutableSequence[ImageRedactionConfig] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=ImageRedactionConfig, + ) + include_findings: bool = proto.Field( + proto.BOOL, + number=6, + ) + byte_item: 'ByteContentItem' = proto.Field( + proto.MESSAGE, + number=7, + message='ByteContentItem', + ) + + +class Color(proto.Message): + r"""Represents a color in the RGB color space. + + Attributes: + red (float): + The amount of red in the color as a value in the interval + [0, 1]. + green (float): + The amount of green in the color as a value in the interval + [0, 1]. + blue (float): + The amount of blue in the color as a value in the interval + [0, 1]. + """ + + red: float = proto.Field( + proto.FLOAT, + number=1, + ) + green: float = proto.Field( + proto.FLOAT, + number=2, + ) + blue: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class RedactImageResponse(proto.Message): + r"""Results of redacting an image. + + Attributes: + redacted_image (bytes): + The redacted image. The type will be the same + as the original image. + extracted_text (str): + If an image was being inspected and the InspectConfig's + include_quote was set to true, then this field will include + all text, if any, that was found in the image. + inspect_result (google.cloud.dlp_v2.types.InspectResult): + The findings. Populated when include_findings in the request + is true. 
+ """ + + redacted_image: bytes = proto.Field( + proto.BYTES, + number=1, + ) + extracted_text: str = proto.Field( + proto.STRING, + number=2, + ) + inspect_result: 'InspectResult' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectResult', + ) + + +class DeidentifyContentRequest(proto.Message): + r"""Request to de-identify a ContentItem. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the de-identification of the content item. + Items specified here will override the template referenced + by the deidentify_template_name argument. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. Items specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to de-identify. Will be treated as text. + + This value must be of type + [Table][google.privacy.dlp.v2.Table] if your + [deidentify_config][google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config] + is a + [RecordTransformations][google.privacy.dlp.v2.RecordTransformations] + object. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. 
Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + deidentify_template_name (str): + Template to use. Any configuration directly specified in + deidentify_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyConfig', + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=4, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + deidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DeidentifyContentResponse(proto.Message): + r"""Results of de-identifying a ContentItem. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The de-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made on the ``item``. + """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + overview: 'TransformationOverview' = proto.Field( + proto.MESSAGE, + number=2, + message='TransformationOverview', + ) + + +class ReidentifyContentRequest(proto.Message): + r"""Request to re-identify an item. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the re-identification of the content item. + This field shares the same proto message type that is used + for de-identification, however its usage here is for the + reversal of the previous de-identification. + Re-identification is performed by examining the + transformations used to de-identify the items and executing + the reverse. This requires that only reversible + transformations be provided here. The reversible + transformations are: + + - ``CryptoDeterministicConfig`` + - ``CryptoReplaceFfxFpeConfig`` + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. + item (google.cloud.dlp_v2.types.ContentItem): + The item to re-identify. Will be treated as + text. + inspect_template_name (str): + Template to use. Any configuration directly specified in + ``inspect_config`` will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + reidentify_template_name (str): + Template to use. References an instance of + ``DeidentifyTemplate``. Any configuration directly specified + in ``reidentify_config`` or ``inspect_config`` will override + those set in the template. 
The ``DeidentifyTemplate`` used + must include only reversible transformations. Singular + fields that are set in this request will replace their + corresponding fields in the template. Repeated fields are + appended. Singular sub-messages and groups are recursively + merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + reidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyConfig', + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=4, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + reidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ReidentifyContentResponse(proto.Message): + r"""Results of re-identifying an item. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The re-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made to the ``item``. + """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + overview: 'TransformationOverview' = proto.Field( + proto.MESSAGE, + number=2, + message='TransformationOverview', + ) + + +class InspectContentRequest(proto.Message): + r"""Request to search for potentially sensitive info in a + ContentItem. + + Attributes: + parent (str): + Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. What specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class InspectContentResponse(proto.Message): + r"""Results of inspecting an item. + + Attributes: + result (google.cloud.dlp_v2.types.InspectResult): + The findings. 
+ """ + + result: 'InspectResult' = proto.Field( + proto.MESSAGE, + number=1, + message='InspectResult', + ) + + +class OutputStorageConfig(proto.Message): + r"""Cloud repository for storing output. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Store findings in an existing table or a new table in an + existing dataset. If table_id is not set a new one will be + generated for you with the following format: + dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific time zone + will be used for generating the date details. + + For Inspect, each column in an existing output table must + have the same name, type, and mode of a field in the + ``Finding`` object. + + For Risk, an existing output table should be the output of a + previous Risk analysis job run on the same source table, + with the same privacy metric and quasi-identifiers. Risk + jobs that analyze the same table but compute a different + privacy metric, or use different sets of quasi-identifiers, + cannot store their results in the same table. + + This field is a member of `oneof`_ ``type``. + output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): + Schema used for writing the findings for Inspect jobs. This + field is only used for Inspect and must be unspecified for + Risk jobs. Columns are derived from the ``Finding`` object. + If appending to an existing table, any columns from the + predefined schema that are missing will be added. No columns + in the existing table will be deleted. + + If unspecified, then all available columns will be used for + a new table or an (existing) table with no schema, and no + changes will be made to an existing table that has a schema. + Only for use with external storage. + """ + class OutputSchema(proto.Enum): + r"""Predefined schemas for storing findings. + Only for use with external storage. 
+ + Values: + OUTPUT_SCHEMA_UNSPECIFIED (0): + Unused. + BASIC_COLUMNS (1): + Basic schema including only ``info_type``, ``quote``, + ``certainty``, and ``timestamp``. + GCS_COLUMNS (2): + Schema tailored to findings from scanning + Cloud Storage. + DATASTORE_COLUMNS (3): + Schema tailored to findings from scanning + Google Datastore. + BIG_QUERY_COLUMNS (4): + Schema tailored to findings from scanning + Google BigQuery. + ALL_COLUMNS (5): + Schema containing all columns. + """ + OUTPUT_SCHEMA_UNSPECIFIED = 0 + BASIC_COLUMNS = 1 + GCS_COLUMNS = 2 + DATASTORE_COLUMNS = 3 + BIG_QUERY_COLUMNS = 4 + ALL_COLUMNS = 5 + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.BigQueryTable, + ) + output_schema: OutputSchema = proto.Field( + proto.ENUM, + number=3, + enum=OutputSchema, + ) + + +class InfoTypeStats(proto.Message): + r"""Statistics regarding a specific InfoType. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The type of finding this stat is for. + count (int): + Number of findings for this infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class InspectDataSourceDetails(proto.Message): + r"""The results of an inspect DataSource job. + + Attributes: + requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): + The configuration used for this job. + result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): + A summary of the outcome of this inspection + job. + """ + + class RequestedOptions(proto.Message): + r"""Snapshot of the inspection configuration. + + Attributes: + snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + If run with an InspectTemplate, a snapshot of + its state at the time of this run. + job_config (google.cloud.dlp_v2.types.InspectJobConfig): + Inspect config. 
+ """ + + snapshot_inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + job_config: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectJobConfig', + ) + + class Result(proto.Message): + r"""All result fields mentioned below are updated while the job + is processing. + + Attributes: + processed_bytes (int): + Total size in bytes that were processed. + total_estimated_bytes (int): + Estimate of the number of bytes to process. + info_type_stats (MutableSequence[google.cloud.dlp_v2.types.InfoTypeStats]): + Statistics of how many instances of each info + type were found during inspect job. + hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): + Statistics related to the processing of + hybrid inspect. + """ + + processed_bytes: int = proto.Field( + proto.INT64, + number=1, + ) + total_estimated_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + info_type_stats: MutableSequence['InfoTypeStats'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InfoTypeStats', + ) + hybrid_stats: 'HybridInspectStatistics' = proto.Field( + proto.MESSAGE, + number=7, + message='HybridInspectStatistics', + ) + + requested_options: RequestedOptions = proto.Field( + proto.MESSAGE, + number=2, + message=RequestedOptions, + ) + result: Result = proto.Field( + proto.MESSAGE, + number=3, + message=Result, + ) + + +class HybridInspectStatistics(proto.Message): + r"""Statistics related to processing hybrid inspect requests. + + Attributes: + processed_count (int): + The number of hybrid inspection requests + processed within this job. + aborted_count (int): + The number of hybrid inspection requests + aborted because the job ran out of quota or was + ended before they could be processed. + pending_count (int): + The number of hybrid requests currently being processed. + Only populated when called via method ``getDlpJob``. 
A burst + of traffic may cause hybrid inspect requests to be enqueued. + Processing will take place as quickly as possible, but + resource limitations may impact how long a request is + enqueued for. + """ + + processed_count: int = proto.Field( + proto.INT64, + number=1, + ) + aborted_count: int = proto.Field( + proto.INT64, + number=2, + ) + pending_count: int = proto.Field( + proto.INT64, + number=3, + ) + + +class InfoTypeDescription(proto.Message): + r"""InfoType description. + + Attributes: + name (str): + Internal name of the infoType. + display_name (str): + Human readable form of the infoType name. + supported_by (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): + Which parts of the API supports this + InfoType. + description (str): + Description of the infotype. Translated when + language is provided in the request. + versions (MutableSequence[google.cloud.dlp_v2.types.VersionDescription]): + A list of available versions for the + infotype. + categories (MutableSequence[google.cloud.dlp_v2.types.InfoTypeCategory]): + The category of the infoType. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + supported_by: MutableSequence['InfoTypeSupportedBy'] = proto.RepeatedField( + proto.ENUM, + number=3, + enum='InfoTypeSupportedBy', + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + versions: MutableSequence['VersionDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='VersionDescription', + ) + categories: MutableSequence['InfoTypeCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='InfoTypeCategory', + ) + + +class InfoTypeCategory(proto.Message): + r"""Classification of infoTypes to organize them according to + geographic location, industry, and data type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + location_category (google.cloud.dlp_v2.types.InfoTypeCategory.LocationCategory): + The region or country that issued the ID or + document represented by the infoType. + + This field is a member of `oneof`_ ``category``. + industry_category (google.cloud.dlp_v2.types.InfoTypeCategory.IndustryCategory): + The group of relevant businesses where this + infoType is commonly used + + This field is a member of `oneof`_ ``category``. + type_category (google.cloud.dlp_v2.types.InfoTypeCategory.TypeCategory): + The class of identifiers where this infoType + belongs + + This field is a member of `oneof`_ ``category``. + """ + class LocationCategory(proto.Enum): + r"""Enum of the current locations. + We might add more locations in the future. + + Values: + LOCATION_UNSPECIFIED (0): + Unused location + GLOBAL (1): + The infoType is not issued by or tied to a + specific region, but is used almost everywhere. + ARGENTINA (2): + The infoType is typically used in Argentina. + AUSTRALIA (3): + The infoType is typically used in Australia. + BELGIUM (4): + The infoType is typically used in Belgium. + BRAZIL (5): + The infoType is typically used in Brazil. + CANADA (6): + The infoType is typically used in Canada. + CHILE (7): + The infoType is typically used in Chile. + CHINA (8): + The infoType is typically used in China. + COLOMBIA (9): + The infoType is typically used in Colombia. + DENMARK (10): + The infoType is typically used in Denmark. + FRANCE (11): + The infoType is typically used in France. + FINLAND (12): + The infoType is typically used in Finland. + GERMANY (13): + The infoType is typically used in Germany. + HONG_KONG (14): + The infoType is typically used in Hong Kong. + INDIA (15): + The infoType is typically used in India. 
+ INDONESIA (16): + The infoType is typically used in Indonesia. + IRELAND (17): + The infoType is typically used in Ireland. + ISRAEL (18): + The infoType is typically used in Israel. + ITALY (19): + The infoType is typically used in Italy. + JAPAN (20): + The infoType is typically used in Japan. + KOREA (21): + The infoType is typically used in Korea. + MEXICO (22): + The infoType is typically used in Mexico. + THE_NETHERLANDS (23): + The infoType is typically used in the + Netherlands. + NORWAY (24): + The infoType is typically used in Norway. + PARAGUAY (25): + The infoType is typically used in Paraguay. + PERU (26): + The infoType is typically used in Peru. + POLAND (27): + The infoType is typically used in Poland. + PORTUGAL (28): + The infoType is typically used in Portugal. + SINGAPORE (29): + The infoType is typically used in Singapore. + SOUTH_AFRICA (30): + The infoType is typically used in South + Africa. + SPAIN (31): + The infoType is typically used in Spain. + SWEDEN (32): + The infoType is typically used in Sweden. + TAIWAN (33): + The infoType is typically used in Taiwan. + THAILAND (34): + The infoType is typically used in Thailand. + TURKEY (35): + The infoType is typically used in Turkey. + UNITED_KINGDOM (36): + The infoType is typically used in the United + Kingdom. + UNITED_STATES (37): + The infoType is typically used in the United + States. + URUGUAY (38): + The infoType is typically used in Uruguay. + VENEZUELA (39): + The infoType is typically used in Venezuela. + INTERNAL (40): + The infoType is typically used in Google + internally. + NEW_ZEALAND (41): + The infoType is typically used in New + Zealand. 
+ """ + LOCATION_UNSPECIFIED = 0 + GLOBAL = 1 + ARGENTINA = 2 + AUSTRALIA = 3 + BELGIUM = 4 + BRAZIL = 5 + CANADA = 6 + CHILE = 7 + CHINA = 8 + COLOMBIA = 9 + DENMARK = 10 + FRANCE = 11 + FINLAND = 12 + GERMANY = 13 + HONG_KONG = 14 + INDIA = 15 + INDONESIA = 16 + IRELAND = 17 + ISRAEL = 18 + ITALY = 19 + JAPAN = 20 + KOREA = 21 + MEXICO = 22 + THE_NETHERLANDS = 23 + NORWAY = 24 + PARAGUAY = 25 + PERU = 26 + POLAND = 27 + PORTUGAL = 28 + SINGAPORE = 29 + SOUTH_AFRICA = 30 + SPAIN = 31 + SWEDEN = 32 + TAIWAN = 33 + THAILAND = 34 + TURKEY = 35 + UNITED_KINGDOM = 36 + UNITED_STATES = 37 + URUGUAY = 38 + VENEZUELA = 39 + INTERNAL = 40 + NEW_ZEALAND = 41 + + class IndustryCategory(proto.Enum): + r"""Enum of the current industries in the category. + We might add more industries in the future. + + Values: + INDUSTRY_UNSPECIFIED (0): + Unused industry + FINANCE (1): + The infoType is typically used in the finance + industry. + HEALTH (2): + The infoType is typically used in the health + industry. + TELECOMMUNICATIONS (3): + The infoType is typically used in the + telecommunications industry. + """ + INDUSTRY_UNSPECIFIED = 0 + FINANCE = 1 + HEALTH = 2 + TELECOMMUNICATIONS = 3 + + class TypeCategory(proto.Enum): + r"""Enum of the current types in the category. + We might add more types in the future. + + Values: + TYPE_UNSPECIFIED (0): + Unused type + PII (1): + Personally identifiable information, for + example, a name or phone number + SPII (2): + Personally identifiable information that is + especially sensitive, for example, a passport + number. + DEMOGRAPHIC (3): + Attributes that can partially identify + someone, especially in combination with other + attributes, like age, height, and gender. + CREDENTIAL (4): + Confidential or secret information, for + example, a password. + GOVERNMENT_ID (5): + An identification document issued by a + government. + DOCUMENT (6): + A document, for example, a resume or source + code. 
+ CONTEXTUAL_INFORMATION (7): + Information that is not sensitive on its own, + but provides details about the circumstances + surrounding an entity or an event. + """ + TYPE_UNSPECIFIED = 0 + PII = 1 + SPII = 2 + DEMOGRAPHIC = 3 + CREDENTIAL = 4 + GOVERNMENT_ID = 5 + DOCUMENT = 6 + CONTEXTUAL_INFORMATION = 7 + + location_category: LocationCategory = proto.Field( + proto.ENUM, + number=1, + oneof='category', + enum=LocationCategory, + ) + industry_category: IndustryCategory = proto.Field( + proto.ENUM, + number=2, + oneof='category', + enum=IndustryCategory, + ) + type_category: TypeCategory = proto.Field( + proto.ENUM, + number=3, + oneof='category', + enum=TypeCategory, + ) + + +class VersionDescription(proto.Message): + r"""Details about each available version for an infotype. + + Attributes: + version (str): + Name of the version + description (str): + Description of the version. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListInfoTypesRequest(proto.Message): + r"""Request for the list of infoTypes. + + Attributes: + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + language_code (str): + BCP-47 language code for localized infoType + friendly names. If omitted, or if localized + strings are not available, en-US strings will be + returned. + filter (str): + filter to only return infoTypes supported by certain parts + of the API. Defaults to supported_by=INSPECT. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + language_code: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + location_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInfoTypesResponse(proto.Message): + r"""Response to the ListInfoTypes request. 
+ + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeDescription]): + Set of sensitive infoTypes. + """ + + info_types: MutableSequence['InfoTypeDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InfoTypeDescription', + ) + + +class RiskAnalysisJobConfig(proto.Message): + r"""Configuration for a risk analysis job. See + https://cloud.google.com/dlp/docs/concepts-risk-analysis to + learn more. + + Attributes: + privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): + Privacy metric to compute. + source_table (google.cloud.dlp_v2.types.BigQueryTable): + Input dataset to compute metrics over. + actions (MutableSequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. Are executed in the order provided. + """ + + privacy_metric: 'PrivacyMetric' = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + source_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + actions: MutableSequence['Action'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Action', + ) + + +class QuasiId(proto.Message): + r"""A column with a semantic tag attached. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. + info_type (google.cloud.dlp_v2.types.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. 
To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + + This field is a member of `oneof`_ ``tag``. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + + This field is a member of `oneof`_ ``tag``. + inferred (google.protobuf.empty_pb2.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + + This field is a member of `oneof`_ ``tag``. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + oneof='tag', + message=storage.InfoType, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=3, + oneof='tag', + ) + inferred: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=4, + oneof='tag', + message=empty_pb2.Empty, + ) + + +class StatisticalTable(proto.Message): + r"""An auxiliary table containing statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): + Required. Quasi-identifier columns. + relative_frequency (google.cloud.dlp_v2.types.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). 
Null values are assumed to be zero. + """ + + class QuasiIdentifierField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Identifies the column. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=2, + ) + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=3, + message=storage.BigQueryTable, + ) + quasi_ids: MutableSequence[QuasiIdentifierField] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=QuasiIdentifierField, + ) + relative_frequency: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + +class PrivacyMetric(proto.Message): + r"""Privacy metric to compute for reidentification risk analysis. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): + Numerical stats + + This field is a member of `oneof`_ ``type``. + categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig): + Categorical stats + + This field is a member of `oneof`_ ``type``. + k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig): + K-anonymity + + This field is a member of `oneof`_ ``type``. 
+ l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig): + l-diversity + + This field is a member of `oneof`_ ``type``. + k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig): + k-map + + This field is a member of `oneof`_ ``type``. + delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig): + delta-presence + + This field is a member of `oneof`_ ``type``. + """ + + class NumericalStatsConfig(proto.Message): + r"""Compute numerical stats over an individual column, including + min, max, and quantiles. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Field to compute numerical stats on. + Supported types are integer, float, date, + datetime, timestamp, time. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + + class CategoricalStatsConfig(proto.Message): + r"""Compute numerical stats over an individual column, including + number of distinct values and value count distribution. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Field to compute categorical stats on. All + column types are supported except for arrays and + structs. However, it may be more informative to + use NumericalStats when the field type is + supported, depending on the data. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + + class KAnonymityConfig(proto.Message): + r"""k-anonymity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Set of fields to compute k-anonymity over. + When multiple fields are specified, they are + considered a single composite key. Structs and + repeated data types are not supported; however, + nested fields are supported so long as they are + not structs themselves or nested within a + repeated field. 
+ entity_id (google.cloud.dlp_v2.types.EntityId): + Message indicating that multiple rows might be associated to + a single individual. If the same entity_id is associated to + multiple quasi-identifier tuples over distinct rows, we + consider the entire collection of tuples as the composite + quasi-identifier. This collection is a multiset: the order + in which the different tuples appear in the dataset is + ignored, but their frequency is taken into account. + + Important note: a maximum of 1000 rows can be associated to + a single entity ID. If more rows are associated with the + same entity ID, some might be ignored. + """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + entity_id: storage.EntityId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.EntityId, + ) + + class LDiversityConfig(proto.Message): + r"""l-diversity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Set of quasi-identifiers indicating how + equivalence classes are defined for the + l-diversity computation. When multiple fields + are specified, they are considered a single + composite key. + sensitive_attribute (google.cloud.dlp_v2.types.FieldId): + Sensitive field for computing the l-value. + """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + sensitive_attribute: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + class KMapEstimationConfig(proto.Message): + r"""Reidentifiability metric. This corresponds to a risk model + similar to what is called "journalist risk" in the literature, + except the attack dataset is statistically modeled instead of + being perfectly known. 
This can be done using publicly available + data (like the US Census), or using a custom statistical model + (indicated as one or several BigQuery tables), or by + extrapolating from the distribution of values in the input + dataset. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): + Required. Fields considered to be + quasi-identifiers. No two columns can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. + auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers column must + appear in exactly one column of one auxiliary table. + """ + + class TaggedField(proto.Message): + r"""A column with a semantic tag attached. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. + info_type (google.cloud.dlp_v2.types.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + + This field is a member of `oneof`_ ``tag``. + custom_tag (str): + A column can be tagged with a custom tag. 
In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + + This field is a member of `oneof`_ ``tag``. + inferred (google.protobuf.empty_pb2.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + + This field is a member of `oneof`_ ``tag``. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + oneof='tag', + message=storage.InfoType, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=3, + oneof='tag', + ) + inferred: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=4, + oneof='tag', + message=empty_pb2.Empty, + ) + + class AuxiliaryTable(proto.Message): + r"""An auxiliary table contains statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): + Required. Quasi-identifier columns. + relative_frequency (google.cloud.dlp_v2.types.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). Null values are assumed to be zero. + """ + + class QuasiIdField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. 
+ + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Identifies the column. + custom_tag (str): + A auxiliary field. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=2, + ) + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=3, + message=storage.BigQueryTable, + ) + quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField', + ) + relative_frequency: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.TaggedField'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PrivacyMetric.KMapEstimationConfig.TaggedField', + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable', + ) + + class DeltaPresenceEstimationConfig(proto.Message): + r"""δ-presence metric, used to estimate how likely it is for an + attacker to figure out that one given individual appears in a + de-identified dataset. Similarly to the k-map metric, we cannot + compute δ-presence exactly without knowing the attack dataset, + so we use a statistical model instead. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.QuasiId]): + Required. Fields considered to be + quasi-identifiers. No two fields can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. 
+ auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers field must appear + in exactly one field of one auxiliary table. + """ + + quasi_ids: MutableSequence['QuasiId'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='QuasiId', + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence['StatisticalTable'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StatisticalTable', + ) + + numerical_stats_config: NumericalStatsConfig = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=NumericalStatsConfig, + ) + categorical_stats_config: CategoricalStatsConfig = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=CategoricalStatsConfig, + ) + k_anonymity_config: KAnonymityConfig = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message=KAnonymityConfig, + ) + l_diversity_config: LDiversityConfig = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=LDiversityConfig, + ) + k_map_estimation_config: KMapEstimationConfig = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=KMapEstimationConfig, + ) + delta_presence_estimation_config: DeltaPresenceEstimationConfig = proto.Field( + proto.MESSAGE, + number=6, + oneof='type', + message=DeltaPresenceEstimationConfig, + ) + + +class AnalyzeDataSourceRiskDetails(proto.Message): + r"""Result of a risk analysis operation request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): + Privacy metric to compute. + requested_source_table (google.cloud.dlp_v2.types.BigQueryTable): + Input dataset to compute metrics over. + numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult): + Numerical stats result + + This field is a member of `oneof`_ ``result``. + categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult): + Categorical stats result + + This field is a member of `oneof`_ ``result``. + k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult): + K-anonymity result + + This field is a member of `oneof`_ ``result``. + l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult): + L-divesity result + + This field is a member of `oneof`_ ``result``. + k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult): + K-map result + + This field is a member of `oneof`_ ``result``. + delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): + Delta-presence result + + This field is a member of `oneof`_ ``result``. + requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions): + The configuration used for this job. + """ + + class NumericalStatsResult(proto.Message): + r"""Result of the numerical stats computation. + + Attributes: + min_value (google.cloud.dlp_v2.types.Value): + Minimum value appearing in the column. + max_value (google.cloud.dlp_v2.types.Value): + Maximum value appearing in the column. + quantile_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + List of 99 values that partition the set of + field values into 100 equal sized buckets. 
+ """ + + min_value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + max_value: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + quantile_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Value', + ) + + class CategoricalStatsResult(proto.Message): + r"""Result of the categorical stats computation. + + Attributes: + value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): + Histogram of value frequencies in the column. + """ + + class CategoricalStatsHistogramBucket(proto.Message): + r"""Histogram of value frequencies in the column. + + Attributes: + value_frequency_lower_bound (int): + Lower bound on the value frequency of the + values in this bucket. + value_frequency_upper_bound (int): + Upper bound on the value frequency of the + values in this bucket. + bucket_size (int): + Total number of values in this bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): + Sample of value frequencies in this bucket. + The total number of values returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct values in this + bucket. 
+ """ + + value_frequency_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + value_frequency_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ValueFrequency', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket', + ) + + class KAnonymityResult(proto.Message): + r"""Result of the k-anonymity computation. + + Attributes: + equivalence_class_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): + Histogram of k-anonymity equivalence classes. + """ + + class KAnonymityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Set of values defining the equivalence class. + One value per quasi-identifier column in the + original KAnonymity metric message. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the equivalence class, for example + number of rows with the above set of values. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + equivalence_class_size: int = proto.Field( + proto.INT64, + number=2, + ) + + class KAnonymityHistogramBucket(proto.Message): + r"""Histogram of k-anonymity equivalence classes. 
+ + Attributes: + equivalence_class_size_lower_bound (int): + Lower bound on the size of the equivalence + classes in this bucket. + equivalence_class_size_upper_bound (int): + Upper bound on the size of the equivalence + classes in this bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. + """ + + equivalence_class_size_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + equivalence_class_size_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + equivalence_class_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', + ) + + class LDiversityResult(proto.Message): + r"""Result of the l-diversity computation. + + Attributes: + sensitive_value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): + Histogram of l-diversity equivalence class + sensitive value frequencies. + """ + + class LDiversityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value. 
+ + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Quasi-identifier values defining the + k-anonymity equivalence class. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the k-anonymity equivalence class. + num_distinct_sensitive_values (int): + Number of distinct sensitive values in this + equivalence class. + top_sensitive_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): + Estimated frequencies of top sensitive + values. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + equivalence_class_size: int = proto.Field( + proto.INT64, + number=2, + ) + num_distinct_sensitive_values: int = proto.Field( + proto.INT64, + number=3, + ) + top_sensitive_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ValueFrequency', + ) + + class LDiversityHistogramBucket(proto.Message): + r"""Histogram of l-diversity equivalence class sensitive value + frequencies. + + Attributes: + sensitive_value_frequency_lower_bound (int): + Lower bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + sensitive_value_frequency_upper_bound (int): + Upper bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. 
+ """ + + sensitive_value_frequency_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + sensitive_value_frequency_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + sensitive_value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', + ) + + class KMapEstimationResult(proto.Message): + r"""Result of the reidentifiability analysis. Note that these + results are an estimation, not exact values. + + Attributes: + k_map_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): + The intervals [min_anonymity, max_anonymity] do not overlap. + If a value doesn't correspond to any such interval, the + associated frequency is zero. For example, the following + records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} + {min_anonymity: 2, max_anonymity: 3, frequency: 42} + {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean + that there are no record with an estimated anonymity of 4, + 5, or larger than 10. + """ + + class KMapEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + The quasi-identifier values. + estimated_anonymity (int): + The estimated anonymity for these + quasi-identifier values. 
+ """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_anonymity: int = proto.Field( + proto.INT64, + number=2, + ) + + class KMapEstimationHistogramBucket(proto.Message): + r"""A KMapEstimationHistogramBucket message with the following values: + min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are + 42 records whose quasi-identifier values correspond to 3, 4 or 5 + people in the overlying population. An important particular case is + when min_anonymity = max_anonymity = 1: the frequency field then + corresponds to the number of uniquely identifiable records. + + Attributes: + min_anonymity (int): + Always positive. + max_anonymity (int): + Always greater than or equal to min_anonymity. + bucket_size (int): + Number of records within these anonymity + bounds. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+            """
+
+            min_anonymity: int = proto.Field(
+                proto.INT64,
+                number=1,
+            )
+            max_anonymity: int = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+            bucket_size: int = proto.Field(
+                proto.INT64,
+                number=5,
+            )
+            bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues'] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=6,
+                message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues',
+            )
+            bucket_value_count: int = proto.Field(
+                proto.INT64,
+                number=7,
+            )
+
+        k_map_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket'] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket',
+        )
+
+    class DeltaPresenceEstimationResult(proto.Message):
+        r"""Result of the δ-presence computation. Note that these results
+        are an estimation, not exact values.
+
+        Attributes:
+            delta_presence_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]):
+                The intervals [min_probability, max_probability) do not
+                overlap. If a value doesn't correspond to any such interval,
+                the associated frequency is zero. For example, the following
+                records: {min_probability: 0, max_probability: 0.1,
+                frequency: 17} {min_probability: 0.2, max_probability: 0.3,
+                frequency: 42} {min_probability: 0.3, max_probability: 0.4,
+                frequency: 99} mean that there are no records with an
+                estimated probability in [0.1, 0.2) nor larger than or equal
+                to 0.4.
+        """
+
+        class DeltaPresenceEstimationQuasiIdValues(proto.Message):
+            r"""A tuple of values for the quasi-identifier columns.
+
+            Attributes:
+                quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]):
+                    The quasi-identifier values.
+ estimated_probability (float): + The estimated probability that a given individual sharing + these quasi-identifier values is in the dataset. This value, + typically called δ, is the ratio between the number of + records in the dataset with these quasi-identifier values, + and the total number of individuals (inside *and* outside + the dataset) with these quasi-identifier values. For + example, if there are 15 individuals in the dataset who + share the same quasi-identifier values, and an estimated 100 + people in the entire population with these values, then δ is + 0.15. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_probability: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + class DeltaPresenceEstimationHistogramBucket(proto.Message): + r"""A DeltaPresenceEstimationHistogramBucket message with the following + values: min_probability: 0.1 max_probability: 0.2 frequency: 42 + means that there are 42 records for which δ is in [0.1, 0.2). An + important particular case is when min_probability = max_probability + = 1: then, every individual who shares this quasi-identifier + combination is in the dataset. + + Attributes: + min_probability (float): + Between 0 and 1. + max_probability (float): + Always greater than or equal to min_probability. + bucket_size (int): + Number of records within these probability + bounds. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_probability: float = proto.Field( + proto.DOUBLE, + number=1, + ) + max_probability: float = proto.Field( + proto.DOUBLE, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=5, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=7, + ) + + delta_presence_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', + ) + + class RequestedRiskAnalysisOptions(proto.Message): + r"""Risk analysis options. + + Attributes: + job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + The job config for the risk job. 
+ """ + + job_config: 'RiskAnalysisJobConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='RiskAnalysisJobConfig', + ) + + requested_privacy_metric: 'PrivacyMetric' = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + requested_source_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + numerical_stats_result: NumericalStatsResult = proto.Field( + proto.MESSAGE, + number=3, + oneof='result', + message=NumericalStatsResult, + ) + categorical_stats_result: CategoricalStatsResult = proto.Field( + proto.MESSAGE, + number=4, + oneof='result', + message=CategoricalStatsResult, + ) + k_anonymity_result: KAnonymityResult = proto.Field( + proto.MESSAGE, + number=5, + oneof='result', + message=KAnonymityResult, + ) + l_diversity_result: LDiversityResult = proto.Field( + proto.MESSAGE, + number=6, + oneof='result', + message=LDiversityResult, + ) + k_map_estimation_result: KMapEstimationResult = proto.Field( + proto.MESSAGE, + number=7, + oneof='result', + message=KMapEstimationResult, + ) + delta_presence_estimation_result: DeltaPresenceEstimationResult = proto.Field( + proto.MESSAGE, + number=9, + oneof='result', + message=DeltaPresenceEstimationResult, + ) + requested_options: RequestedRiskAnalysisOptions = proto.Field( + proto.MESSAGE, + number=10, + message=RequestedRiskAnalysisOptions, + ) + + +class ValueFrequency(proto.Message): + r"""A value of a field, including its frequency. + + Attributes: + value (google.cloud.dlp_v2.types.Value): + A value contained in the field in question. + count (int): + How many times the value is contained in the + field. + """ + + value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class Value(proto.Message): + r"""Set of primitive values supported by the system. 
Note that for the + purposes of inspection or transformation, the number of bytes + considered to comprise a 'Value' is based on its representation as a + UTF-8 encoded string. For example, if 'integer_value' is set to + 123456789, the number of bytes would be counted as 9, even though an + int64 only holds up to 8 bytes of data. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + integer_value (int): + integer + + This field is a member of `oneof`_ ``type``. + float_value (float): + float + + This field is a member of `oneof`_ ``type``. + string_value (str): + string + + This field is a member of `oneof`_ ``type``. + boolean_value (bool): + boolean + + This field is a member of `oneof`_ ``type``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + timestamp + + This field is a member of `oneof`_ ``type``. + time_value (google.type.timeofday_pb2.TimeOfDay): + time of day + + This field is a member of `oneof`_ ``type``. + date_value (google.type.date_pb2.Date): + date + + This field is a member of `oneof`_ ``type``. + day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): + day of week + + This field is a member of `oneof`_ ``type``. 
+ """ + + integer_value: int = proto.Field( + proto.INT64, + number=1, + oneof='type', + ) + float_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof='type', + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof='type', + ) + boolean_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof='type', + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=timestamp_pb2.Timestamp, + ) + time_value: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=6, + oneof='type', + message=timeofday_pb2.TimeOfDay, + ) + date_value: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=7, + oneof='type', + message=date_pb2.Date, + ) + day_of_week_value: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=8, + oneof='type', + enum=dayofweek_pb2.DayOfWeek, + ) + + +class QuoteInfo(proto.Message): + r"""Message for infoType-dependent details parsed from quote. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + date_time (google.cloud.dlp_v2.types.DateTime): + The date time indicated by the quote. + + This field is a member of `oneof`_ ``parsed_quote``. + """ + + date_time: 'DateTime' = proto.Field( + proto.MESSAGE, + number=2, + oneof='parsed_quote', + message='DateTime', + ) + + +class DateTime(proto.Message): + r"""Message for a date time object. + e.g. 2018-01-01, 5th August. + + Attributes: + date (google.type.date_pb2.Date): + One or more of the following must be set. + Must be a valid date or time value. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Day of week + time (google.type.timeofday_pb2.TimeOfDay): + Time of day + time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): + Time zone + """ + + class TimeZone(proto.Message): + r"""Time zone of the date time object. + + Attributes: + offset_minutes (int): + Set only if the offset can be determined. 
+ Positive for time ahead of UTC. E.g. For + "UTC-9", this value is -540. + """ + + offset_minutes: int = proto.Field( + proto.INT32, + number=1, + ) + + date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + time_zone: TimeZone = proto.Field( + proto.MESSAGE, + number=4, + message=TimeZone, + ) + + +class DeidentifyConfig(proto.Message): + r"""The configuration that controls how the data will change. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the dataset as free-form text and apply + the same free text transformation everywhere. + + This field is a member of `oneof`_ ``transformation``. + record_transformations (google.cloud.dlp_v2.types.RecordTransformations): + Treat the dataset as structured. + Transformations can be applied to specific + locations within structured datasets, such as + transforming a column within a table. + + This field is a member of `oneof`_ ``transformation``. + image_transformations (google.cloud.dlp_v2.types.ImageTransformations): + Treat the dataset as an image and redact. + + This field is a member of `oneof`_ ``transformation``. + transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): + Mode for handling transformation errors. If left + unspecified, the default mode is + ``TransformationErrorHandling.ThrowError``. 
+ """ + + info_type_transformations: 'InfoTypeTransformations' = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='InfoTypeTransformations', + ) + record_transformations: 'RecordTransformations' = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RecordTransformations', + ) + image_transformations: 'ImageTransformations' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='ImageTransformations', + ) + transformation_error_handling: 'TransformationErrorHandling' = proto.Field( + proto.MESSAGE, + number=3, + message='TransformationErrorHandling', + ) + + +class ImageTransformations(proto.Message): + r"""A type of transformation that is applied over images. + + Attributes: + transforms (MutableSequence[google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation]): + + """ + + class ImageTransformation(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + selected_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.SelectedInfoTypes): + Apply transformation to the selected info_types. + + This field is a member of `oneof`_ ``target``. + all_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllInfoTypes): + Apply transformation to all findings not specified in other + ImageTransformation's selected_info_types. Only one instance + is allowed within the ImageTransformations message. + + This field is a member of `oneof`_ ``target``. 
+ all_text (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllText): + Apply transformation to all text that doesn't + match an infoType. Only one instance is allowed + within the ImageTransformations message. + + This field is a member of `oneof`_ ``target``. + redaction_color (google.cloud.dlp_v2.types.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. + """ + + class SelectedInfoTypes(proto.Message): + r"""Apply transformation to the selected info_types. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + Required. InfoTypes to apply the + transformation to. Required. Provided InfoType + must be unique within the ImageTransformations + message. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=storage.InfoType, + ) + + class AllInfoTypes(proto.Message): + r"""Apply transformation to all findings. + """ + + class AllText(proto.Message): + r"""Apply to all text. 
+ """ + + selected_info_types: 'ImageTransformations.ImageTransformation.SelectedInfoTypes' = proto.Field( + proto.MESSAGE, + number=4, + oneof='target', + message='ImageTransformations.ImageTransformation.SelectedInfoTypes', + ) + all_info_types: 'ImageTransformations.ImageTransformation.AllInfoTypes' = proto.Field( + proto.MESSAGE, + number=5, + oneof='target', + message='ImageTransformations.ImageTransformation.AllInfoTypes', + ) + all_text: 'ImageTransformations.ImageTransformation.AllText' = proto.Field( + proto.MESSAGE, + number=6, + oneof='target', + message='ImageTransformations.ImageTransformation.AllText', + ) + redaction_color: 'Color' = proto.Field( + proto.MESSAGE, + number=3, + message='Color', + ) + + transforms: MutableSequence[ImageTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=ImageTransformation, + ) + + +class TransformationErrorHandling(proto.Message): + r"""How to handle transformation errors during de-identification. A + transformation error occurs when the requested transformation is + incompatible with the data. For example, trying to de-identify an IP + address using a ``DateShift`` transformation would result in a + transformation error, since date info cannot be extracted from an IP + address. Information about any incompatible transformations, and how + they were handled, is returned in the response as part of the + ``TransformationOverviews``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): + Throw an error + + This field is a member of `oneof`_ ``mode``. 
+        leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed):
+            Ignore errors
+
+            This field is a member of `oneof`_ ``mode``.
+    """
+
+    class ThrowError(proto.Message):
+        r"""Throw an error and fail the request when a transformation
+        error occurs.
+
+        """
+
+    class LeaveUntransformed(proto.Message):
+        r"""Skips the data without modifying it if the requested transformation
+        would cause an error. For example, if a ``DateShift`` transformation
+        were applied to an IP address, this mode would leave the IP address
+        unchanged in the response.
+
+        """
+
+    throw_error: ThrowError = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof='mode',
+        message=ThrowError,
+    )
+    leave_untransformed: LeaveUntransformed = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='mode',
+        message=LeaveUntransformed,
+    )
+
+
+class PrimitiveTransformation(proto.Message):
+    r"""A rule for transforming a value.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig):
+            Replace with a specified value.
+
+            This field is a member of `oneof`_ ``transformation``.
+        redact_config (google.cloud.dlp_v2.types.RedactConfig):
+            Redact
+
+            This field is a member of `oneof`_ ``transformation``.
+        character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig):
+            Mask
+
+            This field is a member of `oneof`_ ``transformation``.
+        crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig):
+            Ffx-Fpe
+
+            This field is a member of `oneof`_ ``transformation``.
+ fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): + Fixed size bucketing + + This field is a member of `oneof`_ ``transformation``. + bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): + Bucketing + + This field is a member of `oneof`_ ``transformation``. + replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): + Replace with infotype + + This field is a member of `oneof`_ ``transformation``. + time_part_config (google.cloud.dlp_v2.types.TimePartConfig): + Time extraction + + This field is a member of `oneof`_ ``transformation``. + crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): + Crypto + + This field is a member of `oneof`_ ``transformation``. + date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): + Date Shift + + This field is a member of `oneof`_ ``transformation``. + crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): + Deterministic Crypto + + This field is a member of `oneof`_ ``transformation``. + replace_dictionary_config (google.cloud.dlp_v2.types.ReplaceDictionaryConfig): + Replace with a value randomly drawn (with + replacement) from a dictionary. + + This field is a member of `oneof`_ ``transformation``. 
+ """ + + replace_config: 'ReplaceValueConfig' = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='ReplaceValueConfig', + ) + redact_config: 'RedactConfig' = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RedactConfig', + ) + character_mask_config: 'CharacterMaskConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='transformation', + message='CharacterMaskConfig', + ) + crypto_replace_ffx_fpe_config: 'CryptoReplaceFfxFpeConfig' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='CryptoReplaceFfxFpeConfig', + ) + fixed_size_bucketing_config: 'FixedSizeBucketingConfig' = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='FixedSizeBucketingConfig', + ) + bucketing_config: 'BucketingConfig' = proto.Field( + proto.MESSAGE, + number=6, + oneof='transformation', + message='BucketingConfig', + ) + replace_with_info_type_config: 'ReplaceWithInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=7, + oneof='transformation', + message='ReplaceWithInfoTypeConfig', + ) + time_part_config: 'TimePartConfig' = proto.Field( + proto.MESSAGE, + number=8, + oneof='transformation', + message='TimePartConfig', + ) + crypto_hash_config: 'CryptoHashConfig' = proto.Field( + proto.MESSAGE, + number=9, + oneof='transformation', + message='CryptoHashConfig', + ) + date_shift_config: 'DateShiftConfig' = proto.Field( + proto.MESSAGE, + number=11, + oneof='transformation', + message='DateShiftConfig', + ) + crypto_deterministic_config: 'CryptoDeterministicConfig' = proto.Field( + proto.MESSAGE, + number=12, + oneof='transformation', + message='CryptoDeterministicConfig', + ) + replace_dictionary_config: 'ReplaceDictionaryConfig' = proto.Field( + proto.MESSAGE, + number=13, + oneof='transformation', + message='ReplaceDictionaryConfig', + ) + + +class TimePartConfig(proto.Message): + r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or + preserve a 
portion of the value. + + Attributes: + part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): + The part of the time to keep. + """ + class TimePart(proto.Enum): + r"""Components that make up time. + + Values: + TIME_PART_UNSPECIFIED (0): + Unused + YEAR (1): + [0-9999] + MONTH (2): + [1-12] + DAY_OF_MONTH (3): + [1-31] + DAY_OF_WEEK (4): + [1-7] + WEEK_OF_YEAR (5): + [1-53] + HOUR_OF_DAY (6): + [0-23] + """ + TIME_PART_UNSPECIFIED = 0 + YEAR = 1 + MONTH = 2 + DAY_OF_MONTH = 3 + DAY_OF_WEEK = 4 + WEEK_OF_YEAR = 5 + HOUR_OF_DAY = 6 + + part_to_extract: TimePart = proto.Field( + proto.ENUM, + number=1, + enum=TimePart, + ) + + +class CryptoHashConfig(proto.Message): + r"""Pseudonymization method that generates surrogates via + cryptographic hashing. Uses SHA-256. + The key size must be either 32 or 64 bytes. + Outputs a base64 encoded representation of the hashed output + (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). + Currently, only string and integer values can be hashed. See + https://cloud.google.com/dlp/docs/pseudonymization to learn + more. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the hash function. + """ + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + + +class CryptoDeterministicConfig(proto.Message): + r"""Pseudonymization method that generates deterministic + encryption for the given input. Outputs a base64 encoded + representation of the encrypted output. Uses AES-SIV based on + the RFC https://tools.ietf.org/html/rfc5297. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the encryption function. For + deterministic encryption using AES-SIV, the + provided key is internally expanded to 64 bytes + prior to use. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom info type to annotate the surrogate with. 
This + annotation will be applied to the surrogate by prefixing it + with the name of the custom info type followed by the number + of characters comprising the surrogate. The following scheme + defines the format: {info type name}({surrogate character + count}):{surrogate} + + For example, if the name of custom info type is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom info type 'Surrogate'. This + facilitates reversal of the surrogate when it occurs in free + text. + + Note: For record transformations where the entire cell in a + table is being transformed, surrogates are not mandatory. + Surrogates are used to denote the location of the token and + are necessary for re-identification in free form text. + + In order for inspection to work properly, the name of this + info type must not occur naturally anywhere in your data; + otherwise, inspection may either + + - reverse a surrogate that does not correspond to an actual + identifier + - be unable to parse the surrogate and result in an error + + Therefore, choose your custom info type name carefully after + considering what your data looks like. One way to select a + name that has a high chance of yielding reliable detection + is to include one or more unicode characters that are highly + improbable to exist in your data. For example, assuming your + data is entered from a regular ASCII keyboard, the symbol + with the hex code point 29DD might be used like so: + ⧝MY_TOKEN_TYPE. + context (google.cloud.dlp_v2.types.FieldId): + A context may be used for higher security and maintaining + referential integrity such that the same identifier in two + different contexts will be given a distinct surrogate. The + context is appended to plaintext value being encrypted. On + decryption the provided context is validated against the + value used during encryption. 
If a context was provided + during encryption, same context must be provided during + decryption as well. + + If the context is not set, plaintext would be used as is for + encryption. If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + plaintext would be used as is for encryption. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + unstructured ``ContentItem``\ s. + """ + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + surrogate_info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + + +class ReplaceValueConfig(proto.Message): + r"""Replace each input value with a given ``Value``. + + Attributes: + new_value (google.cloud.dlp_v2.types.Value): + Value to replace it with. + """ + + new_value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + + +class ReplaceDictionaryConfig(proto.Message): + r"""Replace each input value with a value randomly selected from + the dictionary. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): + A list of words to select from for random replacement. The + `limits `__ page + contains details about the size limits of dictionaries. + + This field is a member of `oneof`_ ``type``. + """ + + word_list: storage.CustomInfoType.Dictionary.WordList = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.Dictionary.WordList, + ) + + +class ReplaceWithInfoTypeConfig(proto.Message): + r"""Replace each matching finding with the name of the info_type. 
+ """ + + +class RedactConfig(proto.Message): + r"""Redact a given value. For example, if used with an + ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My + phone number is 206-555-0123', the output would be 'My phone number + is '. + + """ + + +class CharsToIgnore(proto.Message): + r"""Characters to skip when doing deidentification of a value. + These will be left alone and skipped. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + characters_to_skip (str): + Characters to not transform when masking. + + This field is a member of `oneof`_ ``characters``. + common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): + Common characters to not transform when + masking. Useful to avoid removing punctuation. + + This field is a member of `oneof`_ ``characters``. + """ + class CommonCharsToIgnore(proto.Enum): + r"""Convenience enum for indicating common characters to not + transform. + + Values: + COMMON_CHARS_TO_IGNORE_UNSPECIFIED (0): + Unused. 
+ NUMERIC (1): + 0-9 + ALPHA_UPPER_CASE (2): + A-Z + ALPHA_LOWER_CASE (3): + a-z + PUNCTUATION (4): + US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ + WHITESPACE (5): + Whitespace character, one of [ \\t\n\x0B\f\r] + """ + COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 + NUMERIC = 1 + ALPHA_UPPER_CASE = 2 + ALPHA_LOWER_CASE = 3 + PUNCTUATION = 4 + WHITESPACE = 5 + + characters_to_skip: str = proto.Field( + proto.STRING, + number=1, + oneof='characters', + ) + common_characters_to_ignore: CommonCharsToIgnore = proto.Field( + proto.ENUM, + number=2, + oneof='characters', + enum=CommonCharsToIgnore, + ) + + +class CharacterMaskConfig(proto.Message): + r"""Partially mask a string by replacing a given number of characters + with a fixed character. Masking can start from the beginning or end + of the string. This can be used on data of any type (numbers, longs, + and so on) and when de-identifying structured data we'll attempt to + preserve the original data's type. (This allows you to take a long + like 123 and modify it to a string like \**3. + + Attributes: + masking_character (str): + Character to use to mask the sensitive values—for example, + ``*`` for an alphabetic string such as a name, or ``0`` for + a numeric string such as ZIP code or credit card number. + This string must have a length of 1. If not supplied, this + value defaults to ``*`` for strings, and ``0`` for digits. + number_to_mask (int): + Number of characters to mask. If not set, all matching chars + will be masked. Skipped characters do not count towards this + tally. + + If ``number_to_mask`` is negative, this denotes inverse + masking. Cloud DLP masks all but a number of characters. For + example, suppose you have the following values: + + - ``masking_character`` is ``*`` + - ``number_to_mask`` is ``-4`` + - ``reverse_order`` is ``false`` + - ``CharsToIgnore`` includes ``-`` + - Input string is ``1234-5678-9012-3456`` + + The resulting de-identified string is + ``****-****-****-3456``. 
Cloud DLP masks all but the last + four characters. If ``reverse_order`` is ``true``, all but + the first four characters are masked as + ``1234-****-****-****``. + reverse_order (bool): + Mask characters in reverse order. For example, if + ``masking_character`` is ``0``, ``number_to_mask`` is + ``14``, and ``reverse_order`` is ``false``, then the input + string ``1234-5678-9012-3456`` is masked as + ``00000000000000-3456``. If ``masking_character`` is ``*``, + ``number_to_mask`` is ``3``, and ``reverse_order`` is + ``true``, then the string ``12345`` is masked as ``12***``. + characters_to_ignore (MutableSequence[google.cloud.dlp_v2.types.CharsToIgnore]): + When masking a string, items in this list will be skipped + when replacing characters. For example, if the input string + is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` + and mask 5 characters with ``*``, Cloud DLP returns + ``***-**5-5555``. + """ + + masking_character: str = proto.Field( + proto.STRING, + number=1, + ) + number_to_mask: int = proto.Field( + proto.INT32, + number=2, + ) + reverse_order: bool = proto.Field( + proto.BOOL, + number=3, + ) + characters_to_ignore: MutableSequence['CharsToIgnore'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='CharsToIgnore', + ) + + +class FixedSizeBucketingConfig(proto.Message): + r"""Buckets values based on fixed size ranges. The Bucketing + transformation can provide all of this functionality, but requires + more configuration. This message is provided as a convenience to the + user for simple bucketing strategies. + + The transformed value will be a hyphenated string of + {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and + upper_bound = 20, all values that are within this bucket will be + replaced with "10-20". + + This can be used on data of type: double, long. 
+ + If the bound Value type differs from the type of data being + transformed, we will first attempt converting the type of the data + to be transformed to match the type of the bound before comparing. + + See https://cloud.google.com/dlp/docs/concepts-bucketing to learn + more. + + Attributes: + lower_bound (google.cloud.dlp_v2.types.Value): + Required. Lower bound value of buckets. All values less than + ``lower_bound`` are grouped together into a single bucket; + for example if ``lower_bound`` = 10, then all values less + than 10 are replaced with the value "-10". + upper_bound (google.cloud.dlp_v2.types.Value): + Required. Upper bound value of buckets. All values greater + than upper_bound are grouped together into a single bucket; + for example if ``upper_bound`` = 89, then all values greater + than 89 are replaced with the value "89+". + bucket_size (float): + Required. Size of each bucket (except for minimum and + maximum buckets). So if ``lower_bound`` = 10, + ``upper_bound`` = 89, and ``bucket_size`` = 10, then the + following buckets would be used: -10, 10-20, 20-30, 30-40, + 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 + decimals works. + """ + + lower_bound: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + upper_bound: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + bucket_size: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + +class BucketingConfig(proto.Message): + r"""Generalization function that buckets values based on ranges. The + ranges and replacement values are dynamically provided by the user + for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> + HIGH This can be used on data of type: number, long, string, + timestamp. If the bound ``Value`` type differs from the type of data + being transformed, we will first attempt converting the type of the + data to be transformed to match the type of the bound before + comparing. 
See https://cloud.google.com/dlp/docs/concepts-bucketing + to learn more. + + Attributes: + buckets (MutableSequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): + Set of buckets. Ranges must be + non-overlapping. + """ + + class Bucket(proto.Message): + r"""Bucket is represented as a range, along with replacement + values. + + Attributes: + min_ (google.cloud.dlp_v2.types.Value): + Lower bound of the range, inclusive. Type + should be the same as max if used. + max_ (google.cloud.dlp_v2.types.Value): + Upper bound of the range, exclusive; type + must match min. + replacement_value (google.cloud.dlp_v2.types.Value): + Required. Replacement value for this bucket. + """ + + min_: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + max_: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + replacement_value: 'Value' = proto.Field( + proto.MESSAGE, + number=3, + message='Value', + ) + + buckets: MutableSequence[Bucket] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Bucket, + ) + + +class CryptoReplaceFfxFpeConfig(proto.Message): + r"""Replaces an identifier with a surrogate using Format Preserving + Encryption (FPE) with the FFX mode of operation; however when used + in the ``ReidentifyContent`` API method, it serves the opposite + function by reversing the surrogate back into the original + identifier. The identifier must be encoded as ASCII. For a given + crypto key and context, the same identifier will be replaced with + the same surrogate. Identifiers must be at least two characters + long. In the case that the identifier is the empty string, it will + be skipped. See https://cloud.google.com/dlp/docs/pseudonymization + to learn more. + + Note: We recommend using CryptoDeterministicConfig for all use cases + which do not require preserving the input alphabet space and size, + plus warrant referential integrity. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Required. The key used by the encryption + algorithm. + context (google.cloud.dlp_v2.types.FieldId): + The 'tweak', a context may be used for higher security since + the same identifier in two different contexts won't be given + the same surrogate. If the context is not set, a default + tweak will be used. + + If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + a default tweak will be used. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + unstructured ``ContentItem``\ s. Currently, the referenced + field may be of value type integer or string. + + The tweak is constructed as a sequence of bytes in big + endian byte order such that: + + - a 64 bit integer is encoded followed by a single byte of + value 1 + - a string is encoded in UTF-8 format followed by a single + byte of value 2 + common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): + Common alphabets. + + This field is a member of `oneof`_ ``alphabet``. + custom_alphabet (str): + This is supported by mapping these to the alphanumeric + characters that the FFX mode natively supports. This happens + before/after encryption/decryption. Each character listed + must appear only once. Number of characters must be in the + range [2, 95]. This must be encoded as ASCII. The order of + characters does not matter. 
The full list of allowed + characters is: + 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz + ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ + + This field is a member of `oneof`_ ``alphabet``. + radix (int): + The native way to select the alphabet. Must be in the range + [2, 95]. + + This field is a member of `oneof`_ ``alphabet``. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom infoType to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom infoType followed by the number + of characters comprising the surrogate. The following scheme + defines the format: + info_type_name(surrogate_character_count):surrogate + + For example, if the name of custom infoType is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom infoType + ```SurrogateType`` `__. + This facilitates reversal of the surrogate when it occurs in + free text. + + In order for inspection to work properly, the name of this + infoType must not occur naturally anywhere in your data; + otherwise, inspection may find a surrogate that does not + correspond to an actual identifier. Therefore, choose your + custom infoType name carefully after considering what your + data looks like. One way to select a name that has a high + chance of yielding reliable detection is to include one or + more unicode characters that are highly improbable to exist + in your data. For example, assuming your data is entered + from a regular ASCII keyboard, the symbol with the hex code + point 29DD might be used like so: ⧝MY_TOKEN_TYPE + """ + class FfxCommonNativeAlphabet(proto.Enum): + r"""These are commonly used subsets of the alphabet that the FFX + mode natively supports. In the algorithm, the alphabet is + selected using the "radix". 
Therefore each corresponds to a + particular radix. + + Values: + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (0): + Unused. + NUMERIC (1): + ``[0-9]`` (radix of 10) + HEXADECIMAL (2): + ``[0-9A-F]`` (radix of 16) + UPPER_CASE_ALPHA_NUMERIC (3): + ``[0-9A-Z]`` (radix of 36) + ALPHA_NUMERIC (4): + ``[0-9A-Za-z]`` (radix of 62) + """ + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 + NUMERIC = 1 + HEXADECIMAL = 2 + UPPER_CASE_ALPHA_NUMERIC = 3 + ALPHA_NUMERIC = 4 + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + common_alphabet: FfxCommonNativeAlphabet = proto.Field( + proto.ENUM, + number=4, + oneof='alphabet', + enum=FfxCommonNativeAlphabet, + ) + custom_alphabet: str = proto.Field( + proto.STRING, + number=5, + oneof='alphabet', + ) + radix: int = proto.Field( + proto.INT32, + number=6, + oneof='alphabet', + ) + surrogate_info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=8, + message=storage.InfoType, + ) + + +class CryptoKey(proto.Message): + r"""This is a data encryption key (DEK) (as opposed to + a key encryption key (KEK) stored by Cloud Key Management + Service (Cloud KMS). + When using Cloud KMS to wrap or unwrap a DEK, be sure to set an + appropriate IAM policy on the KEK to ensure an attacker cannot + unwrap the DEK. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transient (google.cloud.dlp_v2.types.TransientCryptoKey): + Transient crypto key + + This field is a member of `oneof`_ ``source``. 
+ unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): + Unwrapped crypto key + + This field is a member of `oneof`_ ``source``. + kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): + Key wrapped using Cloud KMS + + This field is a member of `oneof`_ ``source``. + """ + + transient: 'TransientCryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + oneof='source', + message='TransientCryptoKey', + ) + unwrapped: 'UnwrappedCryptoKey' = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message='UnwrappedCryptoKey', + ) + kms_wrapped: 'KmsWrappedCryptoKey' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='KmsWrappedCryptoKey', + ) + + +class TransientCryptoKey(proto.Message): + r"""Use this to have a random data crypto key generated. + It will be discarded after the request finishes. + + Attributes: + name (str): + Required. Name of the key. This is an arbitrary string used + to differentiate different keys. A unique key is generated + per name: two separate ``TransientCryptoKey`` protos share + the same generated key if their names are the same. When the + data crypto key is generated, this name is not used in any + way (repeating the api call will result in a different key + being generated). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UnwrappedCryptoKey(proto.Message): + r"""Using raw keys is prone to security risks due to accidentally + leaking the key. Choose another type of key if possible. + + Attributes: + key (bytes): + Required. A 128/192/256 bit key. + """ + + key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class KmsWrappedCryptoKey(proto.Message): + r"""Include to use an existing data crypto key wrapped by KMS. The + wrapped key must be a 128-, 192-, or 256-bit key. 
Authorization + requires the following IAM permissions when sending a request to + perform a crypto transformation using a KMS-wrapped crypto key: + dlp.kms.encrypt + + For more information, see [Creating a wrapped key] + (https://cloud.google.com/dlp/docs/create-wrapped-key). + + Note: When you use Cloud KMS for cryptographic operations, `charges + apply `__. + + Attributes: + wrapped_key (bytes): + Required. The wrapped data crypto key. + crypto_key_name (str): + Required. The resource name of the KMS + CryptoKey to use for unwrapping. + """ + + wrapped_key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DateShiftConfig(proto.Message): + r"""Shifts dates by random number of days, with option to be + consistent for the same context. See + https://cloud.google.com/dlp/docs/concepts-date-shifting to + learn more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + upper_bound_days (int): + Required. Range of shift in days. Actual + shift will be selected at random within this + range (inclusive ends). Negative means shift to + earlier in time. Must not be more than 365250 + days (1000 years) each direction. + For example, 3 means shift date to at most 3 + days into the future. + lower_bound_days (int): + Required. For example, -5 means shift date to + at most 5 days back in the past. + context (google.cloud.dlp_v2.types.FieldId): + Points to the field that contains the + context, for example, an entity id. If set, must + also set cryptoKey. If set, shift will be + consistent for the given context. + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Causes the shift to be computed based on this key and the + context. This results in the same shift for the same context + and crypto_key. If set, must also set context. Can only be + applied to table items. + + This field is a member of `oneof`_ ``method``. 
+ """ + + upper_bound_days: int = proto.Field( + proto.INT32, + number=1, + ) + lower_bound_days: int = proto.Field( + proto.INT32, + number=2, + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=4, + oneof='method', + message='CryptoKey', + ) + + +class InfoTypeTransformations(proto.Message): + r"""A type of transformation that will scan unstructured text and apply + various ``PrimitiveTransformation``\ s to each finding, where the + transformation is applied to only values that were identified as a + specific info_type. + + Attributes: + transformations (MutableSequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): + Required. Transformation for each infoType. + Cannot specify more than one for a given + infoType. + """ + + class InfoTypeTransformation(proto.Message): + r"""A transformation to apply to text that is identified as a specific + info_type. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoTypes to apply the transformation to. An empty list will + cause this transformation to apply to all findings that + correspond to infoTypes that were requested in + ``InspectConfig``. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Required. Primitive transformation to apply + to the infoType. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + primitive_transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=2, + message='PrimitiveTransformation', + ) + + transformations: MutableSequence[InfoTypeTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=InfoTypeTransformation, + ) + + +class FieldTransformation(proto.Message): + r"""The transformation to apply to the field. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Required. Input field(s) to apply the transformation to. + When you have columns that reference their position within a + list, omit the index from the FieldId. FieldId name matching + ignores the index. For example, instead of + "contact.nums[0].type", use "contact.nums.type". + condition (google.cloud.dlp_v2.types.RecordCondition): + Only apply the transformation if the condition evaluates to + true for the given ``RecordCondition``. The conditions are + allowed to reference fields that are not used in the actual + transformation. + + Example Use Cases: + + - Apply a different bucket transformation to an age column + if the zip code column for the same record is within a + specific range. + - Redact a field if the date of birth field is greater than + 85. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Apply the transformation to the entire field. + + This field is a member of `oneof`_ ``transformation``. + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the contents of the field as free text, and + selectively transform content that matches an ``InfoType``. + + This field is a member of `oneof`_ ``transformation``. 
+ """ + + fields: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + condition: 'RecordCondition' = proto.Field( + proto.MESSAGE, + number=3, + message='RecordCondition', + ) + primitive_transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='PrimitiveTransformation', + ) + info_type_transformations: 'InfoTypeTransformations' = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='InfoTypeTransformations', + ) + + +class RecordTransformations(proto.Message): + r"""A type of transformation that is applied over structured data + such as a table. + + Attributes: + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + Transform the record by applying various + field transformations. + record_suppressions (MutableSequence[google.cloud.dlp_v2.types.RecordSuppression]): + Configuration defining which records get + suppressed entirely. Records that match any + suppression rule are omitted from the output. + """ + + field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FieldTransformation', + ) + record_suppressions: MutableSequence['RecordSuppression'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='RecordSuppression', + ) + + +class RecordSuppression(proto.Message): + r"""Configuration to suppress records whose suppression + conditions evaluate to true. + + Attributes: + condition (google.cloud.dlp_v2.types.RecordCondition): + A condition that when it evaluates to true + will result in the record being evaluated to be + suppressed from the transformed content. 
+ """ + + condition: 'RecordCondition' = proto.Field( + proto.MESSAGE, + number=1, + message='RecordCondition', + ) + + +class RecordCondition(proto.Message): + r"""A condition for determining whether a transformation should + be applied to a field. + + Attributes: + expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): + An expression. + """ + + class Condition(proto.Message): + r"""The field type of ``value`` and ``field`` do not need to match to be + considered equal, but not all comparisons are possible. EQUAL_TO and + NOT_EQUAL_TO attempt to compare even with incompatible types, but + all other comparisons are invalid with incompatible types. A + ``value`` of type: + + - ``string`` can be compared against all other types + - ``boolean`` can only be compared against other booleans + - ``integer`` can be compared against doubles or a string if the + string value can be parsed as an integer. + - ``double`` can be compared against integers or a string if the + string can be parsed as a double. + - ``Timestamp`` can be compared against strings in RFC 3339 date + string format. + - ``TimeOfDay`` can be compared against timestamps and strings in + the format of 'HH:mm:ss'. + + If we fail to compare do to type mismatch, a warning will be given + and the condition will evaluate to false. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Field within the record this + condition is evaluated against. + operator (google.cloud.dlp_v2.types.RelationalOperator): + Required. Operator used to compare the field + or infoType to the value. + value (google.cloud.dlp_v2.types.Value): + Value to compare against. [Mandatory, except for ``EXISTS`` + tests.] 
+ """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + operator: 'RelationalOperator' = proto.Field( + proto.ENUM, + number=3, + enum='RelationalOperator', + ) + value: 'Value' = proto.Field( + proto.MESSAGE, + number=4, + message='Value', + ) + + class Conditions(proto.Message): + r"""A collection of conditions. + + Attributes: + conditions (MutableSequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): + A collection of conditions. + """ + + conditions: MutableSequence['RecordCondition.Condition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='RecordCondition.Condition', + ) + + class Expressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): + The operator to apply to the result of conditions. Default + and currently only supported value is ``AND``. + conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): + Conditions to apply to the expression. + + This field is a member of `oneof`_ ``type``. + """ + class LogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused + AND (1): + Conditional AND + """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + AND = 1 + + logical_operator: 'RecordCondition.Expressions.LogicalOperator' = proto.Field( + proto.ENUM, + number=1, + enum='RecordCondition.Expressions.LogicalOperator', + ) + conditions: 'RecordCondition.Conditions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='RecordCondition.Conditions', + ) + + expressions: Expressions = proto.Field( + proto.MESSAGE, + number=3, + message=Expressions, + ) + + +class TransformationOverview(proto.Message): + r"""Overview of the modifications that occurred. 
+ + Attributes: + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + transformation_summaries (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary]): + Transformations applied to the dataset. + """ + + transformed_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + transformation_summaries: MutableSequence['TransformationSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='TransformationSummary', + ) + + +class TransformationSummary(proto.Message): + r"""Summary of a single transformation. Only one of 'transformation', + 'field_transformation', or 'record_suppress' will be set. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a + specific InfoType. + field (google.cloud.dlp_v2.types.FieldId): + Set if the transformation was limited to a + specific FieldId. + transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + The specific transformation these stats apply + to. + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + The field transformation that was applied. + If multiple field transformations are requested + for a single field, this list will contain all + of them; otherwise, only one is supplied. + record_suppress (google.cloud.dlp_v2.types.RecordSuppression): + The specific suppression option these stats + apply to. + results (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): + Collection of all transformations that took + place or had an error. + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + """ + class TransformationResultCode(proto.Enum): + r"""Possible outcomes of transformations. + + Values: + TRANSFORMATION_RESULT_CODE_UNSPECIFIED (0): + Unused + SUCCESS (1): + Transformation completed without an error. + ERROR (2): + Transformation had an error. 
+ """ + TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 + SUCCESS = 1 + ERROR = 2 + + class SummaryResult(proto.Message): + r"""A collection that informs the user the number of times a particular + ``TransformationResultCode`` and error details occurred. + + Attributes: + count (int): + Number of transformations counted by this + result. + code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): + Outcome of the transformation. + details (str): + A place for warnings or errors to show up if + a transformation didn't work as expected. + """ + + count: int = proto.Field( + proto.INT64, + number=1, + ) + code: 'TransformationSummary.TransformationResultCode' = proto.Field( + proto.ENUM, + number=2, + enum='TransformationSummary.TransformationResultCode', + ) + details: str = proto.Field( + proto.STRING, + number=3, + ) + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=3, + message='PrimitiveTransformation', + ) + field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='FieldTransformation', + ) + record_suppress: 'RecordSuppression' = proto.Field( + proto.MESSAGE, + number=6, + message='RecordSuppression', + ) + results: MutableSequence[SummaryResult] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=SummaryResult, + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=7, + ) + + +class TransformationDescription(proto.Message): + r"""A flattened description of a ``PrimitiveTransformation`` or + ``RecordSuppression``. + + Attributes: + type_ (google.cloud.dlp_v2.types.TransformationType): + The transformation type. + description (str): + A description of the transformation. 
This is empty for a + RECORD_SUPPRESSION, or is the output of calling toString() + on the ``PrimitiveTransformation`` protocol buffer message + for any other type of transformation. + condition (str): + A human-readable string representation of the + ``RecordCondition`` corresponding to this transformation. + Set if a ``RecordCondition`` was used to determine whether + or not to apply this transformation. + + Examples: \* (age_field > 85) \* (age_field <= 18) \* + (zip_field exists) \* (zip_field == 01234) && (city_field != + "Springville") \* (zip_field == 01234) && (age_field <= 18) + && (city_field exists) + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a specific + ``InfoType``. + """ + + type_: 'TransformationType' = proto.Field( + proto.ENUM, + number=1, + enum='TransformationType', + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + condition: str = proto.Field( + proto.STRING, + number=3, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=4, + message=storage.InfoType, + ) + + +class TransformationDetails(proto.Message): + r"""Details about a single transformation. This object contains a + description of the transformation, information about whether the + transformation was successfully applied, and the precise + location where the transformation occurred. These details are + stored in a user-specified BigQuery table. + + Attributes: + resource_name (str): + The name of the job that completed the + transformation. + container_name (str): + The top level name of the container where the + transformation is located (this will be the + source file name or table name). + transformation (MutableSequence[google.cloud.dlp_v2.types.TransformationDescription]): + Description of transformation. This would only contain more + than one element if there were multiple matching + transformations and which one to apply was ambiguous. 
Not + set for states that contain no transformation, currently + only state that contains no transformation is + TransformationResultStateType.METADATA_UNRETRIEVABLE. + status_details (google.cloud.dlp_v2.types.TransformationResultStatus): + Status of the transformation, if + transformation was not successful, this will + specify what caused it to fail, otherwise it + will show that the transformation was + successful. + transformed_bytes (int): + The number of bytes that were transformed. If + transformation was unsuccessful or did not take + place because there was no content to transform, + this will be zero. + transformation_location (google.cloud.dlp_v2.types.TransformationLocation): + The precise location of the transformed + content in the original container. + """ + + resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + container_name: str = proto.Field( + proto.STRING, + number=2, + ) + transformation: MutableSequence['TransformationDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='TransformationDescription', + ) + status_details: 'TransformationResultStatus' = proto.Field( + proto.MESSAGE, + number=4, + message='TransformationResultStatus', + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + transformation_location: 'TransformationLocation' = proto.Field( + proto.MESSAGE, + number=6, + message='TransformationLocation', + ) + + +class TransformationLocation(proto.Message): + r"""Specifies the location of a transformation. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + finding_id (str): + For infotype transformations, link to the + corresponding findings ID so that location + information does not need to be duplicated. Each + findings ID correlates to an entry in the + findings output table, this table only gets + created when users specify to save findings (add + the save findings action to the request). + + This field is a member of `oneof`_ ``location_type``. + record_transformation (google.cloud.dlp_v2.types.RecordTransformation): + For record transformations, provide a field + and container information. + + This field is a member of `oneof`_ ``location_type``. + container_type (google.cloud.dlp_v2.types.TransformationContainerType): + Information about the functionality of the + container where this finding occurred, if + available. + """ + + finding_id: str = proto.Field( + proto.STRING, + number=1, + oneof='location_type', + ) + record_transformation: 'RecordTransformation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location_type', + message='RecordTransformation', + ) + container_type: 'TransformationContainerType' = proto.Field( + proto.ENUM, + number=3, + enum='TransformationContainerType', + ) + + +class RecordTransformation(proto.Message): + r""" + + Attributes: + field_id (google.cloud.dlp_v2.types.FieldId): + For record transformations, provide a field. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if + applicable. + container_version (str): + Container version, if available ("generation" + for Cloud Storage). 
+ """ + + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TransformationResultStatus(proto.Message): + r""" + + Attributes: + result_status_type (google.cloud.dlp_v2.types.TransformationResultStatusType): + Transformation result status type, this will + be either SUCCESS, or it will be the reason for + why the transformation was not completely + successful. + details (google.rpc.status_pb2.Status): + Detailed error codes and messages + """ + + result_status_type: 'TransformationResultStatusType' = proto.Field( + proto.ENUM, + number=1, + enum='TransformationResultStatusType', + ) + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class TransformationDetailsStorageConfig(proto.Message): + r"""Config for storing transformation details. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + The BigQuery table in which to store the output. This may be + an existing table or in a new table in an existing dataset. + If table_id is not set a new one will be generated for you + with the following format: + dlp_googleapis_transformation_details_yyyy_mm_dd_[dlp_job_id]. + Pacific time zone will be used for generating the date + details. + + This field is a member of `oneof`_ ``type``. + """ + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.BigQueryTable, + ) + + +class Schedule(proto.Message): + r"""Schedule for inspect job triggers. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + recurrence_period_duration (google.protobuf.duration_pb2.Duration): + With this option a job is started on a + regular periodic basis. For example: every day + (86400 seconds). + A scheduled start time will be skipped if the + previous execution has not ended when its + scheduled time occurs. + This value must be set to a time duration + greater than or equal to 1 day and can be no + longer than 60 days. + + This field is a member of `oneof`_ ``option``. + """ + + recurrence_period_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + oneof='option', + message=duration_pb2.Duration, + ) + + +class Manual(proto.Message): + r"""Job trigger option for hybrid jobs. Jobs must be manually + created and finished. + + """ + + +class InspectTemplate(proto.Message): + r"""The inspectTemplate contains a configuration (set of types of + sensitive data to be detected) to be used anywhere you otherwise + would normally specify InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates to learn + more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + The core content of the template. + Configuration of the scanning process. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='InspectConfig', + ) + + +class DeidentifyTemplate(proto.Message): + r"""DeidentifyTemplates contains instructions on how to + de-identify content. See + https://cloud.google.com/dlp/docs/concepts-templates to learn + more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + The core content of the template. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + deidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='DeidentifyConfig', + ) + + +class Error(proto.Message): + r"""Details information about an error encountered during job + execution or the results of an unsuccessful activation of the + JobTrigger. + + Attributes: + details (google.rpc.status_pb2.Status): + Detailed error codes and messages. + timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): + The times the error occurred. + """ + + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class JobTrigger(proto.Message): + r"""Contains a configuration to make dlp api calls on a repeating + basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers to learn + more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Unique resource name for the triggeredJob, assigned by the + service when the triggeredJob is created, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + display_name (str): + Display name (max 100 chars) + description (str): + User provided description (max 256 chars) + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + For inspect jobs, a snapshot of the + configuration. + + This field is a member of `oneof`_ ``job``. 
+ triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): + A list of triggers which will be OR'ed + together. Only one in the list needs to trigger + for a job to be started. The list may contain + only a single Schedule trigger and must have at + least one object. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Output only. A stream of errors encountered + when the trigger was activated. Repeated errors + may result in the JobTrigger automatically being + paused. Will return the last 100 errors. + Whenever the JobTrigger is modified this list + will be cleared. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of a + triggeredJob. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of a + triggeredJob. + last_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp of the last time + this trigger executed. + status (google.cloud.dlp_v2.types.JobTrigger.Status): + Required. A status for this trigger. + """ + class Status(proto.Enum): + r"""Whether the trigger is currently active. If PAUSED or + CANCELLED, no jobs will be created with this configuration. The + service may automatically pause triggers experiencing frequent + errors. To restart a job, set the status to HEALTHY after + correcting user errors. + + Values: + STATUS_UNSPECIFIED (0): + Unused. + HEALTHY (1): + Trigger is healthy. + PAUSED (2): + Trigger is temporarily paused. + CANCELLED (3): + Trigger is cancelled and can not be resumed. + """ + STATUS_UNSPECIFIED = 0 + HEALTHY = 1 + PAUSED = 2 + CANCELLED = 3 + + class Trigger(proto.Message): + r"""What event needs to occur for a new job to be started. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + schedule (google.cloud.dlp_v2.types.Schedule): + Create a job on a repeating basis based on + the elapse of time. + + This field is a member of `oneof`_ ``trigger``. + manual (google.cloud.dlp_v2.types.Manual): + For use with hybrid jobs. Jobs must be + manually created and finished. + + This field is a member of `oneof`_ ``trigger``. + """ + + schedule: 'Schedule' = proto.Field( + proto.MESSAGE, + number=1, + oneof='trigger', + message='Schedule', + ) + manual: 'Manual' = proto.Field( + proto.MESSAGE, + number=2, + oneof='trigger', + message='Manual', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + inspect_job: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=4, + oneof='job', + message='InspectJobConfig', + ) + triggers: MutableSequence[Trigger] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=Trigger, + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='Error', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + last_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + status: Status = proto.Field( + proto.ENUM, + number=10, + enum=Status, + ) + + +class Action(proto.Message): + r"""A task to execute on the completion of a job. + See https://cloud.google.com/dlp/docs/concepts-actions to learn + more. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): + Save resulting findings in a provided + location. + + This field is a member of `oneof`_ ``action``. + pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub): + Publish a notification to a Pub/Sub topic. + + This field is a member of `oneof`_ ``action``. + publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc): + Publish summary to Cloud Security Command + Center (Alpha). + + This field is a member of `oneof`_ ``action``. + publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog): + Publish findings to Cloud Datahub. + + This field is a member of `oneof`_ ``action``. + deidentify (google.cloud.dlp_v2.types.Action.Deidentify): + Create a de-identified copy of the input + data. + + This field is a member of `oneof`_ ``action``. + job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails): + Sends an email when the job completes. The email goes to IAM + project owners and technical `Essential + Contacts `__. + + This field is a member of `oneof`_ ``action``. + publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver): + Enable Stackdriver metric dlp.googleapis.com/finding_count. + + This field is a member of `oneof`_ ``action``. + """ + + class SaveFindings(proto.Message): + r"""If set, the detailed findings will be persisted to the + specified OutputStorageConfig. Only a single instance of this + action can be specified. + Compatible with: Inspect, Risk + + Attributes: + output_config (google.cloud.dlp_v2.types.OutputStorageConfig): + Location to store findings outside of DLP. 
+ """ + + output_config: 'OutputStorageConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='OutputStorageConfig', + ) + + class PublishToPubSub(proto.Message): + r"""Publish a message into a given Pub/Sub topic when DlpJob has + completed. The message contains a single field, ``DlpJobName``, + which is equal to the finished job's + ```DlpJob.name`` `__. + Compatible with: Inspect, Risk + + Attributes: + topic (str): + Cloud Pub/Sub topic to send notifications to. + The topic must have given publishing access + rights to the DLP API service account executing + the long running DlpJob sending the + notifications. Format is + projects/{project}/topics/{topic}. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + + class PublishSummaryToCscc(proto.Message): + r"""Publish the result summary of a DlpJob to the Cloud Security + Command Center (CSCC Alpha). + This action is only available for projects which are parts of an + organization and whitelisted for the alpha Cloud Security + Command Center. + The action will publish the count of finding instances and their + info types. The summary of findings will be persisted in CSCC + and are governed by CSCC service-specific policy, see + https://cloud.google.com/terms/service-terms Only a single + instance of this action can be specified. Compatible with: + Inspect + + """ + + class PublishFindingsToCloudDataCatalog(proto.Message): + r"""Publish findings of a DlpJob to Data Catalog. In Data Catalog, tag + templates are applied to the resource that Cloud DLP scanned. Data + Catalog tag templates are stored in the same project and region + where the BigQuery table exists. For Cloud DLP to create and apply + the tag template, the Cloud DLP service agent must have the + ``roles/datacatalog.tagTemplateOwner`` permission on the project. + The tag template contains fields summarizing the results of the + DlpJob. Any field values previously written by another DlpJob are + deleted. 
[InfoType naming patterns][google.privacy.dlp.v2.InfoType] + are strictly enforced when using this feature. + + Findings are persisted in Data Catalog storage and are governed by + service-specific policies for Data Catalog. For more information, + see `Service Specific + Terms `__. + + Only a single instance of this action can be specified. This action + is allowed only if all resources being scanned are BigQuery tables. + Compatible with: Inspect + + """ + + class Deidentify(proto.Message): + r"""Create a de-identified copy of the requested table or files. + + A TransformationDetail will be created for each transformation. + + If any rows in BigQuery are skipped during de-identification + (transformation errors or row size exceeds BigQuery insert API + limits) they are placed in the failure output table. If the original + row exceeds the BigQuery insert API limit it will be truncated when + written to the failure output table. The failure output table can be + set in the + action.deidentify.output.big_query_output.deidentified_failure_output_table + field, if no table is set, a table will be automatically created in + the same project and dataset as the original table. + + Compatible with: Inspect + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transformation_config (google.cloud.dlp_v2.types.TransformationConfig): + User specified deidentify templates and + configs for structured, unstructured, and image + files. + transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): + Config for storing transformation details. This is separate + from the de-identified content, and contains metadata about + the successful transformations and/or failures that occurred + while de-identifying. 
This needs to be set in order for + users to access information about the status of each + transformation (see + [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] + message for more information about what is noted). + cloud_storage_output (str): + Required. User settable Cloud Storage bucket + and folders to store de-identified files. This + field must be set for cloud storage + deidentification. The output Cloud Storage + bucket must be different from the input bucket. + De-identified files will overwrite files in the + output path. + Form of: gs://bucket/folder/ or gs://bucket + + This field is a member of `oneof`_ ``output``. + file_types_to_transform (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of user-specified file type groups to transform. If + specified, only the files with these filetypes will be + transformed. If empty, all supported files will be + transformed. Supported types may be automatically added over + time. If a file type is set in this field that isn't + supported by the Deidentify action then the job will fail + and will not be successfully created/started. Currently the + only filetypes supported are: IMAGES, TEXT_FILES, CSV, TSV. + """ + + transformation_config: 'TransformationConfig' = proto.Field( + proto.MESSAGE, + number=7, + message='TransformationConfig', + ) + transformation_details_storage_config: 'TransformationDetailsStorageConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='TransformationDetailsStorageConfig', + ) + cloud_storage_output: str = proto.Field( + proto.STRING, + number=9, + oneof='output', + ) + file_types_to_transform: MutableSequence[storage.FileType] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=storage.FileType, + ) + + class JobNotificationEmails(proto.Message): + r"""Sends an email when the job completes. The email goes to IAM project + owners and technical `Essential + Contacts `__. 
+
+
+        """
+
+    class PublishToStackdriver(proto.Message):
+        r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This
+        will publish a metric to stack driver on each infotype requested and
+        how many findings were found for it. CustomDetectors will be
+        bucketed as 'Custom' under the Stackdriver label 'info_type'.
+
+        """
+
+    save_findings: SaveFindings = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof='action',
+        message=SaveFindings,
+    )
+    pub_sub: PublishToPubSub = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='action',
+        message=PublishToPubSub,
+    )
+    publish_summary_to_cscc: PublishSummaryToCscc = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof='action',
+        message=PublishSummaryToCscc,
+    )
+    publish_findings_to_cloud_data_catalog: PublishFindingsToCloudDataCatalog = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof='action',
+        message=PublishFindingsToCloudDataCatalog,
+    )
+    deidentify: Deidentify = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        oneof='action',
+        message=Deidentify,
+    )
+    job_notification_emails: JobNotificationEmails = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        oneof='action',
+        message=JobNotificationEmails,
+    )
+    publish_to_stackdriver: PublishToStackdriver = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        oneof='action',
+        message=PublishToStackdriver,
+    )
+
+
+class TransformationConfig(proto.Message):
+    r"""User specified templates and configs for how to deidentify
+    structured, unstructured, and image files. User must provide
+    either an unstructured deidentify template or at least one redact
+    image config.
+
+    Attributes:
+        deidentify_template (str):
+            De-identify template. If this template is specified, it will
+            serve as the default de-identify template. This template
+            cannot contain ``record_transformations`` since it can be
+            used for unstructured content such as free-form text files.
+            If this template is not set, a default
+            ``ReplaceWithInfoTypeConfig`` will be used to de-identify
+            unstructured content.
+ structured_deidentify_template (str): + Structured de-identify template. If this template is + specified, it will serve as the de-identify template for + structured content such as delimited files and tables. If + this template is not set but the ``deidentify_template`` is + set, then ``deidentify_template`` will also apply to the + structured content. If neither template is set, a default + ``ReplaceWithInfoTypeConfig`` will be used to de-identify + structured content. + image_redact_template (str): + Image redact template. + If this template is specified, it will serve as + the de-identify template for images. If this + template is not set, all findings in the image + will be redacted with a black box. + """ + + deidentify_template: str = proto.Field( + proto.STRING, + number=1, + ) + structured_deidentify_template: str = proto.Field( + proto.STRING, + number=2, + ) + image_redact_template: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CreateInspectTemplateRequest(proto.Message): + r"""Request message for CreateInspectTemplate. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + template_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInspectTemplateRequest(proto.Message): + r"""Request message for UpdateInspectTemplate. + + Attributes: + name (str): + Required. Resource name of organization and inspectTemplate + to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetInspectTemplateRequest(proto.Message): + r"""Request message for GetInspectTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListInspectTemplatesRequest(proto.Message): + r"""Request message for ListInspectTemplates. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListInspectTemplates``. + page_size (int): + Size of the page, can be limited by the + server. 
If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the template was + created. + - ``update_time``: corresponds to the time the template was + last updated. + - ``name``: corresponds to the template's name. + - ``display_name``: corresponds to the template's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInspectTemplatesResponse(proto.Message): + r"""Response message for ListInspectTemplates. + + Attributes: + inspect_templates (MutableSequence[google.cloud.dlp_v2.types.InspectTemplate]): + List of inspectTemplates, up to page_size in + ListInspectTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListInspectTemplates request. + """ + + @property + def raw_page(self): + return self + + inspect_templates: MutableSequence['InspectTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteInspectTemplateRequest(proto.Message): + r"""Request message for DeleteInspectTemplate. + + Attributes: + name (str): + Required. 
Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateJobTriggerRequest(proto.Message): + r"""Request message for CreateJobTrigger. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + trigger_id (str): + The trigger id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: 'JobTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + trigger_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ActivateJobTriggerRequest(proto.Message): + r"""Request message for ActivateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the trigger to activate, for + example ``projects/dlp-test-project/jobTriggers/53234423``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateJobTriggerRequest(proto.Message): + r"""Request message for UpdateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: 'JobTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetJobTriggerRequest(proto.Message): + r"""Request message for GetJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDlpJobRequest(proto.Message): + r"""Request message for CreateDlpJobRequest. Used to initiate + long running jobs such as calculating risk metrics or inspecting + Google Cloud Storage. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage repository + for InfoTypes. + + This field is a member of `oneof`_ ``job``. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a BigQuery + table. + + This field is a member of `oneof`_ ``job``. + job_id (str): + The job id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_job: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=2, + oneof='job', + message='InspectJobConfig', + ) + risk_job: 'RiskAnalysisJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='job', + message='RiskAnalysisJobConfig', + ) + job_id: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListJobTriggersRequest(proto.Message): + r"""Request message for ListJobTriggers. + + Attributes: + parent (str): + Required. Parent resource name. 
+
+            The format of this value varies depending on whether you
+            have `specified a processing
+            location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+            -  Projects scope, location specified:
+               ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+            -  Projects scope, no location specified (defaults to
+               global): ``projects/``\ PROJECT_ID
+
+            The following example ``parent`` string specifies a parent
+            project with the identifier ``example-project``, and
+            specifies the ``europe-west3`` location for processing data:
+
+            ::
+
+                parent=projects/example-project/locations/europe-west3
+        page_token (str):
+            Page token to continue retrieval. Comes from previous call
+            to ListJobTriggers. ``order_by`` field must not change for
+            subsequent calls.
+        page_size (int):
+            Size of the page, can be limited by a server.
+        order_by (str):
+            Comma separated list of triggeredJob fields to order by,
+            followed by ``asc`` or ``desc`` postfix. This list is
+            case-insensitive, default sorting order is ascending,
+            redundant space characters are insignificant.
+
+            Example: ``name asc,update_time, create_time desc``
+
+            Supported fields are:
+
+            -  ``create_time``: corresponds to the time the JobTrigger
+               was created.
+            -  ``update_time``: corresponds to the time the JobTrigger
+               was last updated.
+            -  ``last_run_time``: corresponds to the last time the
+               JobTrigger ran.
+            -  ``name``: corresponds to the JobTrigger's name.
+            -  ``display_name``: corresponds to the JobTrigger's display
+               name.
+            -  ``status``: corresponds to JobTrigger's status.
+        filter (str):
+            Allows filtering.
+
+            Supported syntax:
+
+            -  Filter expressions are made up of one or more
+               restrictions.
+            -  Restrictions can be combined by ``AND`` or ``OR`` logical
+               operators. A sequence of restrictions implicitly uses
+               ``AND``.
+            -  A restriction has the form of
+               ``{field} {operator} {value}``.
+ - Supported fields/values for inspect triggers: + + - ``status`` - HEALTHY|PAUSED|CANCELLED + - ``inspected_storage`` - + DATASTORE|CLOUD_STORAGE|BIGQUERY + - 'last_run_time\` - RFC 3339 formatted timestamp, + surrounded by quotation marks. Nanoseconds are + ignored. + - 'error_count' - Number of errors that have occurred + while running. + + - The operator must be ``=`` or ``!=`` for status and + inspected_storage. + + Examples: + + - inspected_storage = cloud_storage AND status = HEALTHY + - inspected_storage = cloud_storage OR inspected_storage = + bigquery + - inspected_storage = cloud_storage AND (state = PAUSED OR + state = HEALTHY) + - last_run_time > "2017-12-12T00:00:00+00:00" + + The length of this field should be no more than 500 + characters. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of jobs. Will use ``DlpJobType.INSPECT`` if not + set. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=6, + enum='DlpJobType', + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListJobTriggersResponse(proto.Message): + r"""Response message for ListJobTriggers. + + Attributes: + job_triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger]): + List of triggeredJobs, up to page_size in + ListJobTriggersRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListJobTriggers request. 
+ """ + + @property + def raw_page(self): + return self + + job_triggers: MutableSequence['JobTrigger'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='JobTrigger', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteJobTriggerRequest(proto.Message): + r"""Request message for DeleteJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InspectJobConfig(proto.Message): + r"""Controls what and how to inspect for findings. + + Attributes: + storage_config (google.cloud.dlp_v2.types.StorageConfig): + The data to scan. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + How and what to scan for. + inspect_template_name (str): + If provided, will be used as the default for all values in + InspectConfig. ``inspect_config`` will be merged into the + values persisted as part of the template. + actions (MutableSequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. + """ + + storage_config: storage.StorageConfig = proto.Field( + proto.MESSAGE, + number=1, + message=storage.StorageConfig, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=3, + ) + actions: MutableSequence['Action'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Action', + ) + + +class DataProfileAction(proto.Message): + r"""A task to execute when a data profile has been generated. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + export_data (google.cloud.dlp_v2.types.DataProfileAction.Export): + Export data profiles into a provided + location. + + This field is a member of `oneof`_ ``action``. + pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): + Publish a message into the Pub/Sub topic. + + This field is a member of `oneof`_ ``action``. + """ + class EventType(proto.Enum): + r"""Types of event that can trigger an action. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + Unused. + NEW_PROFILE (1): + New profile (not a re-profile). + CHANGED_PROFILE (2): + Changed one of the following profile metrics: + + - Table data risk score + - Table sensitivity score + - Table resource visibility + - Table encryption type + - Table predicted infoTypes + - Table other infoTypes + SCORE_INCREASED (3): + Table data risk score or sensitivity score + increased. + ERROR_CHANGED (4): + A user (non-internal) error occurred. + """ + EVENT_TYPE_UNSPECIFIED = 0 + NEW_PROFILE = 1 + CHANGED_PROFILE = 2 + SCORE_INCREASED = 3 + ERROR_CHANGED = 4 + + class Export(proto.Message): + r"""If set, the detailed data profiles will be persisted to the + location of your choice whenever updated. + + Attributes: + profile_table (google.cloud.dlp_v2.types.BigQueryTable): + Store all table and column profiles in an + existing table or a new table in an existing + dataset. Each re-generation will result in a new + row in BigQuery. + """ + + profile_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + message=storage.BigQueryTable, + ) + + class PubSubNotification(proto.Message): + r"""Send a Pub/Sub message into the given Pub/Sub topic to connect other + systems to data profile generation. The message payload data will be + the byte serialization of ``DataProfilePubSubMessage``. + + Attributes: + topic (str): + Cloud Pub/Sub topic to send notifications to. 
+ Format is projects/{project}/topics/{topic}. + event (google.cloud.dlp_v2.types.DataProfileAction.EventType): + The type of event that triggers a Pub/Sub. At most one + ``PubSubNotification`` per EventType is permitted. + pubsub_condition (google.cloud.dlp_v2.types.DataProfilePubSubCondition): + Conditions (e.g., data risk or sensitivity + level) for triggering a Pub/Sub. + detail_of_message (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification.DetailLevel): + How much data to include in the Pub/Sub message. If the user + wishes to limit the size of the message, they can use + resource_name and fetch the profile fields they wish to. Per + table profile (not per column). + """ + class DetailLevel(proto.Enum): + r"""The levels of detail that can be included in the Pub/Sub + message. + + Values: + DETAIL_LEVEL_UNSPECIFIED (0): + Unused. + TABLE_PROFILE (1): + The full table data profile. + RESOURCE_NAME (2): + The resource name of the table. + """ + DETAIL_LEVEL_UNSPECIFIED = 0 + TABLE_PROFILE = 1 + RESOURCE_NAME = 2 + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + event: 'DataProfileAction.EventType' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileAction.EventType', + ) + pubsub_condition: 'DataProfilePubSubCondition' = proto.Field( + proto.MESSAGE, + number=3, + message='DataProfilePubSubCondition', + ) + detail_of_message: 'DataProfileAction.PubSubNotification.DetailLevel' = proto.Field( + proto.ENUM, + number=4, + enum='DataProfileAction.PubSubNotification.DetailLevel', + ) + + export_data: Export = proto.Field( + proto.MESSAGE, + number=1, + oneof='action', + message=Export, + ) + pub_sub_notification: PubSubNotification = proto.Field( + proto.MESSAGE, + number=2, + oneof='action', + message=PubSubNotification, + ) + + +class DataProfileJobConfig(proto.Message): + r"""Configuration for setting up a job to scan resources for profile + generation. 
Only one data profile configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to the [data + retention policy] + (https://cloud.google.com/dlp/docs/data-profiles#retention). + + Attributes: + location (google.cloud.dlp_v2.types.DataProfileLocation): + The data to scan. + project_id (str): + The project that will run the scan. The DLP + service account that exists within this project + must have access to all resources that are + profiled, and the Cloud DLP API must be enabled. + inspect_templates (MutableSequence[str]): + Detection logic for profile generation. + + Not all template features are used by profiles. + FindingLimits, include_quote and exclude_info_types have no + impact on data profiling. + + Multiple templates may be provided if there is data in + multiple regions. At most one template must be specified + per-region (including "global"). Each region is scanned + using the applicable template. If no region-specific + template is specified, but a "global" template is specified, + it will be copied to that region and used instead. If no + global or region-specific template is provided for a region + with data, that region's data will not be scanned. + + For more information, see + https://cloud.google.com/dlp/docs/data-profiles#data_residency. + data_profile_actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): + Actions to execute at the completion of the + job. + """ + + location: 'DataProfileLocation' = proto.Field( + proto.MESSAGE, + number=1, + message='DataProfileLocation', + ) + project_id: str = proto.Field( + proto.STRING, + number=5, + ) + inspect_templates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + data_profile_actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='DataProfileAction', + ) + + +class DataProfileLocation(proto.Message): + r"""The data that will be profiled. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + organization_id (int): + The ID of an organization to scan. + + This field is a member of `oneof`_ ``location``. + folder_id (int): + The ID of the Folder within an organization + to scan. + + This field is a member of `oneof`_ ``location``. + """ + + organization_id: int = proto.Field( + proto.INT64, + number=1, + oneof='location', + ) + folder_id: int = proto.Field( + proto.INT64, + number=2, + oneof='location', + ) + + +class DlpJob(proto.Message): + r"""Combines all of the information about a DLP job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The server-assigned name. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. + state (google.cloud.dlp_v2.types.DlpJob.JobState): + State of a job. + risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): + Results from analyzing risk of a data source. + + This field is a member of `oneof`_ ``details``. + inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): + Results from inspecting a data source. + + This field is a member of `oneof`_ ``details``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job finished. 
+ job_trigger_name (str): + If created by a job trigger, the resource + name of the trigger that instantiated the job. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + A stream of errors encountered running the + job. + """ + class JobState(proto.Enum): + r"""Possible states of a job. New items may be added. + + Values: + JOB_STATE_UNSPECIFIED (0): + Unused. + PENDING (1): + The job has not yet started. + RUNNING (2): + The job is currently running. Once a job has + finished it will transition to FAILED or DONE. + DONE (3): + The job is no longer running. + CANCELED (4): + The job was canceled before it could be + completed. + FAILED (5): + The job had an error and did not complete. + ACTIVE (6): + The job is currently accepting findings via + hybridInspect. A hybrid job in ACTIVE state may + continue to have findings added to it through + the calling of hybridInspect. After the job has + finished no more calls to hybridInspect may be + made. ACTIVE jobs can transition to DONE. 
+ """ + JOB_STATE_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + DONE = 3 + CANCELED = 4 + FAILED = 5 + ACTIVE = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=2, + enum='DlpJobType', + ) + state: JobState = proto.Field( + proto.ENUM, + number=3, + enum=JobState, + ) + risk_details: 'AnalyzeDataSourceRiskDetails' = proto.Field( + proto.MESSAGE, + number=4, + oneof='details', + message='AnalyzeDataSourceRiskDetails', + ) + inspect_details: 'InspectDataSourceDetails' = proto.Field( + proto.MESSAGE, + number=5, + oneof='details', + message='InspectDataSourceDetails', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + job_trigger_name: str = proto.Field( + proto.STRING, + number=10, + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message='Error', + ) + + +class GetDlpJobRequest(proto.Message): + r"""The request message for [DlpJobs.GetDlpJob][]. + + Attributes: + name (str): + Required. The name of the DlpJob resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDlpJobsRequest(proto.Message): + r"""The request message for listing DLP jobs. + + Attributes: + parent (str): + Required. Parent resource name. 
+
+            The format of this value varies depending on whether you
+            have `specified a processing
+            location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+            -  Projects scope, location specified:
+               ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+            -  Projects scope, no location specified (defaults to
+               global): ``projects/``\ PROJECT_ID
+
+            The following example ``parent`` string specifies a parent
+            project with the identifier ``example-project``, and
+            specifies the ``europe-west3`` location for processing data:
+
+            ::
+
+                parent=projects/example-project/locations/europe-west3
+        filter (str):
+            Allows filtering.
+
+            Supported syntax:
+
+            -  Filter expressions are made up of one or more
+               restrictions.
+            -  Restrictions can be combined by ``AND`` or ``OR`` logical
+               operators. A sequence of restrictions implicitly uses
+               ``AND``.
+            -  A restriction has the form of
+               ``{field} {operator} {value}``.
+            -  Supported fields/values for inspect jobs:
+
+               -  ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED
+               -  ``inspected_storage`` -
+                  DATASTORE|CLOUD_STORAGE|BIGQUERY
+               -  ``trigger_name`` - The name of the trigger that
+                  created the job.
+               -  'end_time\` - Corresponds to the time the job
+                  finished.
+               -  'start_time\` - Corresponds to the time the job
+                  finished.
+
+            -  Supported fields for risk analysis jobs:
+
+               -  ``state`` - RUNNING|CANCELED|FINISHED|FAILED
+               -  'end_time\` - Corresponds to the time the job
+                  finished.
+               -  'start_time\` - Corresponds to the time the job
+                  finished.
+
+            -  The operator must be ``=`` or ``!=``.
+
+            Examples:
+
+            -  inspected_storage = cloud_storage AND state = done
+            -  inspected_storage = cloud_storage OR inspected_storage =
+               bigquery
+            -  inspected_storage = cloud_storage AND (state = done OR
+               state = canceled)
+            -  end_time > "2017-12-12T00:00:00+00:00"
+
+            The length of this field should be no more than 500
+            characters.
+        page_size (int):
+            The standard list page size.
+        page_token (str):
+            The standard list page token.
+ type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. Defaults to ``DlpJobType.INSPECT`` + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, end_time asc, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the job was + created. + - ``end_time``: corresponds to the time the job ended. + - ``name``: corresponds to the job's name. + - ``state``: corresponds to ``state`` + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=5, + enum='DlpJobType', + ) + order_by: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListDlpJobsResponse(proto.Message): + r"""The response message for listing DLP jobs. + + Attributes: + jobs (MutableSequence[google.cloud.dlp_v2.types.DlpJob]): + A list of DlpJobs that matches the specified + filter in the request. + next_page_token (str): + The standard List next-page token. + """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence['DlpJob'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DlpJob', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelDlpJobRequest(proto.Message): + r"""The request message for canceling a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. 
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class FinishDlpJobRequest(proto.Message):
+    r"""The request message for finishing a DLP hybrid job.
+
+    Attributes:
+        name (str):
+            Required. The name of the DlpJob resource to
+            be cancelled.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class DeleteDlpJobRequest(proto.Message):
+    r"""The request message for deleting a DLP job.
+
+    Attributes:
+        name (str):
+            Required. The name of the DlpJob resource to
+            be deleted.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CreateDeidentifyTemplateRequest(proto.Message):
+    r"""Request message for CreateDeidentifyTemplate.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+
+            The format of this value varies depending on the scope of
+            the request (project or organization) and whether you have
+            `specified a processing
+            location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+            -  Projects scope, location specified:
+               ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+            -  Projects scope, no location specified (defaults to
+               global): ``projects/``\ PROJECT_ID
+            -  Organizations scope, location specified:
+               ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+            -  Organizations scope, no location specified (defaults to
+               global): ``organizations/``\ ORG_ID
+
+            The following example ``parent`` string specifies a parent
+            project with the identifier ``example-project``, and
+            specifies the ``europe-west3`` location for processing data:
+
+            ::
+
+                parent=projects/example-project/locations/europe-west3
+        deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate):
+            Required. The DeidentifyTemplate to create.
+        template_id (str):
+            The template id can contain uppercase and lowercase letters,
+            numbers, and hyphens; that is, it must match the regular
+            expression: ``[a-zA-Z\d-_]+``. The maximum length is 100
+            characters. Can be empty to allow the system to generate
+            one.
+        location_id (str):
+            Deprecated. This field has no effect.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    deidentify_template: 'DeidentifyTemplate' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='DeidentifyTemplate',
+    )
+    template_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    location_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class UpdateDeidentifyTemplateRequest(proto.Message):
+    r"""Request message for UpdateDeidentifyTemplate.
+
+    Attributes:
+        name (str):
+            Required. Resource name of organization and deidentify
+            template to be updated, for example
+            ``organizations/433245324/deidentifyTemplates/432452342`` or
+            projects/project-id/deidentifyTemplates/432452342.
+        deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate):
+            New DeidentifyTemplate value.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Mask to control which fields get updated.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    deidentify_template: 'DeidentifyTemplate' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='DeidentifyTemplate',
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class GetDeidentifyTemplateRequest(proto.Message):
+    r"""Request message for GetDeidentifyTemplate.
+
+    Attributes:
+        name (str):
+            Required. Resource name of the organization and deidentify
+            template to be read, for example
+            ``organizations/433245324/deidentifyTemplates/432452342`` or
+            projects/project-id/deidentifyTemplates/432452342.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListDeidentifyTemplatesRequest(proto.Message):
+    r"""Request message for ListDeidentifyTemplates.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+
+            The format of this value varies depending on the scope of
+            the request (project or organization) and whether you have
+            `specified a processing
+            location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+            -  Projects scope, location specified:
+               ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+            -  Projects scope, no location specified (defaults to
+               global): ``projects/``\ PROJECT_ID
+            -  Organizations scope, location specified:
+               ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+            -  Organizations scope, no location specified (defaults to
+               global): ``organizations/``\ ORG_ID
+
+            The following example ``parent`` string specifies a parent
+            project with the identifier ``example-project``, and
+            specifies the ``europe-west3`` location for processing data:
+
+            ::
+
+                parent=projects/example-project/locations/europe-west3
+        page_token (str):
+            Page token to continue retrieval. Comes from previous call
+            to ``ListDeidentifyTemplates``.
+        page_size (int):
+            Size of the page, can be limited by the
+            server. If zero server returns a page of max
+            size 100.
+        order_by (str):
+            Comma separated list of fields to order by, followed by
+            ``asc`` or ``desc`` postfix. This list is case-insensitive,
+            default sorting order is ascending, redundant space
+            characters are insignificant.
+
+            Example: ``name asc,update_time, create_time desc``
+
+            Supported fields are:
+
+            -  ``create_time``: corresponds to the time the template was
+               created.
+            -  ``update_time``: corresponds to the time the template was
+               last updated.
+            -  ``name``: corresponds to the template's name.
+            -  ``display_name``: corresponds to the template's display
+               name.
+        location_id (str):
+            Deprecated. This field has no effect.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    location_id: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListDeidentifyTemplatesResponse(proto.Message):
+    r"""Response message for ListDeidentifyTemplates.
+
+    Attributes:
+        deidentify_templates (MutableSequence[google.cloud.dlp_v2.types.DeidentifyTemplate]):
+            List of deidentify templates, up to page_size in
+            ListDeidentifyTemplatesRequest.
+        next_page_token (str):
+            If the next page is available then the next
+            page token to be used in following
+            ListDeidentifyTemplates request.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    deidentify_templates: MutableSequence['DeidentifyTemplate'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='DeidentifyTemplate',
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class DeleteDeidentifyTemplateRequest(proto.Message):
+    r"""Request message for DeleteDeidentifyTemplate.
+
+    Attributes:
+        name (str):
+            Required. Resource name of the organization and deidentify
+            template to be deleted, for example
+            ``organizations/433245324/deidentifyTemplates/432452342`` or
+            projects/project-id/deidentifyTemplates/432452342.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class LargeCustomDictionaryConfig(proto.Message):
+    r"""Configuration for a custom dictionary created from a data source of
+    any size up to the maximum size defined in the
+    `limits <https://cloud.google.com/dlp/limits>`__ page. The artifacts
+    of dictionary creation are stored in the specified Cloud Storage
+    location. Consider using ``CustomInfoType.Dictionary`` for smaller
+    dictionaries that satisfy the size requirements.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + output_path (google.cloud.dlp_v2.types.CloudStoragePath): + Location to store dictionary artifacts in + Cloud Storage. These files will only be + accessible by project owners and the DLP API. If + any of these artifacts are modified, the + dictionary is considered invalid and can no + longer be used. + cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): + Set of files containing newline-delimited + lists of dictionary phrases. + + This field is a member of `oneof`_ ``source``. + big_query_field (google.cloud.dlp_v2.types.BigQueryField): + Field in a BigQuery table where each cell + represents a dictionary phrase. + + This field is a member of `oneof`_ ``source``. + """ + + output_path: storage.CloudStoragePath = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CloudStoragePath, + ) + cloud_storage_file_set: storage.CloudStorageFileSet = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message=storage.CloudStorageFileSet, + ) + big_query_field: storage.BigQueryField = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message=storage.BigQueryField, + ) + + +class LargeCustomDictionaryStats(proto.Message): + r"""Summary statistics of a custom dictionary. + + Attributes: + approx_num_phrases (int): + Approximate number of distinct phrases in the + dictionary. + """ + + approx_num_phrases: int = proto.Field( + proto.INT64, + number=1, + ) + + +class StoredInfoTypeConfig(proto.Message): + r"""Configuration for stored infoTypes. All fields and subfield + are provided by the user. For more information, see + https://cloud.google.com/dlp/docs/creating-custom-infotypes. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + Display name of the StoredInfoType (max 256 + characters). + description (str): + Description of the StoredInfoType (max 256 + characters). + large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): + StoredInfoType where findings are defined by + a dictionary of phrases. + + This field is a member of `oneof`_ ``type``. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Store dictionary-based CustomInfoType. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Store regular expression-based + StoredInfoType. + + This field is a member of `oneof`_ ``type``. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + large_custom_dictionary: 'LargeCustomDictionaryConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='LargeCustomDictionaryConfig', + ) + dictionary: storage.CustomInfoType.Dictionary = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + + +class StoredInfoTypeStats(proto.Message): + r"""Statistics for a StoredInfoType. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): + StoredInfoType where findings are defined by + a dictionary of phrases. + + This field is a member of `oneof`_ ``type``. 
+ """ + + large_custom_dictionary: 'LargeCustomDictionaryStats' = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='LargeCustomDictionaryStats', + ) + + +class StoredInfoTypeVersion(proto.Message): + r"""Version of a StoredInfoType, including the configuration used + to build it, create timestamp, and current state. + + Attributes: + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + StoredInfoType configuration. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Create timestamp of the version. Read-only, + determined by the system when the version is + created. + state (google.cloud.dlp_v2.types.StoredInfoTypeState): + Stored info type version state. Read-only, + updated by the system during dictionary + creation. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Errors that occurred when creating this storedInfoType + version, or anomalies detected in the storedInfoType data + that render it unusable. Only the five most recent errors + will be displayed, with the most recent error appearing + first. + + For example, some of the data for stored custom dictionaries + is put in the user's Cloud Storage bucket, and if this data + is modified or deleted by the user or another system, the + dictionary becomes invalid. + + If any errors occur, fix the problem indicated by the error + message and use the UpdateStoredInfoType API method to + create another version of the storedInfoType to continue + using it, reusing the same ``config`` if it was not the + source of the error. + stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): + Statistics about this storedInfoType version. 
+ """ + + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='StoredInfoTypeConfig', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'StoredInfoTypeState' = proto.Field( + proto.ENUM, + number=3, + enum='StoredInfoTypeState', + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Error', + ) + stats: 'StoredInfoTypeStats' = proto.Field( + proto.MESSAGE, + number=5, + message='StoredInfoTypeStats', + ) + + +class StoredInfoType(proto.Message): + r"""StoredInfoType resource message that contains information + about the current version and any pending updates. + + Attributes: + name (str): + Resource name. + current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): + Current version of the stored info type. + pending_versions (MutableSequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): + Pending versions of the stored info type. + Empty if no versions are pending. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + current_version: 'StoredInfoTypeVersion' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeVersion', + ) + pending_versions: MutableSequence['StoredInfoTypeVersion'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StoredInfoTypeVersion', + ) + + +class CreateStoredInfoTypeRequest(proto.Message): + r"""Request message for CreateStoredInfoType. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Required. Configuration of the storedInfoType + to create. + stored_info_type_id (str): + The storedInfoType ID can contain uppercase and lowercase + letters, numbers, and hyphens; that is, it must match the + regular expression: ``[a-zA-Z\d-_]+``. The maximum length is + 100 characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + stored_info_type_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateStoredInfoTypeRequest(proto.Message): + r"""Request message for UpdateStoredInfoType. + + Attributes: + name (str): + Required. Resource name of organization and storedInfoType + to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. 
+ config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the storedInfoType. + If not provided, a new version of the + storedInfoType will be created with the existing + configuration. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetStoredInfoTypeRequest(proto.Message): + r"""Request message for GetStoredInfoType. + + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListStoredInfoTypesRequest(proto.Message): + r"""Request message for ListStoredInfoTypes. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListStoredInfoTypes``. + page_size (int): + Size of the page, can be limited by the + server. 
If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, display_name, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the most recent + version of the resource was created. + - ``state``: corresponds to the state of the resource. + - ``name``: corresponds to resource name. + - ``display_name``: corresponds to info type's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListStoredInfoTypesResponse(proto.Message): + r"""Response message for ListStoredInfoTypes. + + Attributes: + stored_info_types (MutableSequence[google.cloud.dlp_v2.types.StoredInfoType]): + List of storedInfoTypes, up to page_size in + ListStoredInfoTypesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListStoredInfoTypes request. + """ + + @property + def raw_page(self): + return self + + stored_info_types: MutableSequence['StoredInfoType'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='StoredInfoType', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteStoredInfoTypeRequest(proto.Message): + r"""Request message for DeleteStoredInfoType. + + Attributes: + name (str): + Required. 
Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class HybridInspectJobTriggerRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the trigger to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: 'HybridContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridInspectDlpJobRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: 'HybridContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridContentItem(proto.Message): + r"""An individual hybrid item to inspect. Will be stored + temporarily during processing. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): + Supplementary information that will be added + to each finding. 
+ """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + finding_details: 'HybridFindingDetails' = proto.Field( + proto.MESSAGE, + number=2, + message='HybridFindingDetails', + ) + + +class HybridFindingDetails(proto.Message): + r"""Populate to associate additional data with each finding. + + Attributes: + container_details (google.cloud.dlp_v2.types.Container): + Details about the container where the content + being inspected is from. + file_offset (int): + Offset in bytes of the line, from the + beginning of the file, where the finding is + located. Populate if the item being scanned is + only part of a bigger item, such as a shard of a + file and you want to track the absolute position + of the finding. + row_offset (int): + Offset of the row for tables. Populate if the + row(s) being scanned are part of a bigger + dataset and you want to keep track of their + absolute position. + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional information to make + findings meaningful such as the columns that are primary + keys. If not known ahead of time, can also be set within + each inspect hybrid call and the two will be merged. Note + that identifying_fields will only be stored to BigQuery, and + only if the BigQuery action has been included. + labels (MutableMapping[str, str]): + Labels to represent user provided metadata about the data + being inspected. If configured by the job, some key values + may be required. The labels associated with ``Finding``'s + produced by hybrid inspection. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. 
+ + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + """ + + container_details: 'Container' = proto.Field( + proto.MESSAGE, + number=1, + message='Container', + ) + file_offset: int = proto.Field( + proto.INT64, + number=2, + ) + row_offset: int = proto.Field( + proto.INT64, + number=3, + ) + table_options: storage.TableOptions = proto.Field( + proto.MESSAGE, + number=4, + message=storage.TableOptions, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class HybridInspectResponse(proto.Message): + r"""Quota exceeded errors will be thrown once quota has been met. + """ + + +class DataRiskLevel(proto.Message): + r"""Score is a summary of all elements in the data profile. + A higher number means more risk. + + Attributes: + score (google.cloud.dlp_v2.types.DataRiskLevel.DataRiskLevelScore): + The score applied to the resource. + """ + class DataRiskLevelScore(proto.Enum): + r"""Various score levels for resources. + + Values: + RISK_SCORE_UNSPECIFIED (0): + Unused. + RISK_LOW (10): + Low risk - Lower indication of sensitive data + that appears to have additional access + restrictions in place or no indication of + sensitive data found. + RISK_MODERATE (20): + Medium risk - Sensitive data may be present + but additional access or fine grain access + restrictions appear to be present. Consider + limiting access even further or transform data + to mask. + RISK_HIGH (30): + High risk – SPII may be present. Access + controls may include public ACLs. Exfiltration + of data may lead to user data loss. + Re-identification of users may be possible. + Consider limiting usage and or removing SPII. 
+ """ + RISK_SCORE_UNSPECIFIED = 0 + RISK_LOW = 10 + RISK_MODERATE = 20 + RISK_HIGH = 30 + + score: DataRiskLevelScore = proto.Field( + proto.ENUM, + number=1, + enum=DataRiskLevelScore, + ) + + +class DataProfileConfigSnapshot(proto.Message): + r"""Snapshot of the configurations used to generate the profile. + + Attributes: + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + A copy of the inspection config used to generate this + profile. This is a copy of the inspect_template specified in + ``DataProfileJobConfig``. + data_profile_job (google.cloud.dlp_v2.types.DataProfileJobConfig): + A copy of the configuration used to generate + this profile. + """ + + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + data_profile_job: 'DataProfileJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='DataProfileJobConfig', + ) + + +class TableDataProfile(proto.Message): + r"""The profile for a scanned table. + + Attributes: + name (str): + The name of the profile. + project_data_profile (str): + The resource name to the project data profile + for this table. + dataset_project_id (str): + The GCP project ID that owns the BigQuery + dataset. + dataset_location (str): + The BigQuery location where the dataset's + data is stored. See + https://cloud.google.com/bigquery/docs/locations + for supported locations. + dataset_id (str): + The BigQuery dataset ID. + table_id (str): + The BigQuery table ID. + full_resource (str): + The resource name of the table. + https://cloud.google.com/apis/design/resource_names#full_resource_name + profile_status (google.cloud.dlp_v2.types.ProfileStatus): + Success or error status from the most recent + profile generation attempt. May be empty if the + profile is still being generated. + state (google.cloud.dlp_v2.types.TableDataProfile.State): + State of a profile. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The sensitivity score of this table. 
+ data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): + The data risk level of this table. + predicted_info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSummary]): + The infoTypes predicted from this table's + data. + other_info_types (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): + Other infoTypes found in this table's data. + config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): + The snapshot of the configurations used to + generate the profile. + last_modified_time (google.protobuf.timestamp_pb2.Timestamp): + The time when this table was last modified + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when this table expires. + scanned_column_count (int): + The number of columns profiled in the table. + failed_column_count (int): + The number of columns skipped in the table + because of an error. + table_size_bytes (int): + The size of the table when the profile was + generated. + row_count (int): + Number of rows in the table when the profile + was generated. This will not be populated for + BigLake tables. + encryption_status (google.cloud.dlp_v2.types.EncryptionStatus): + How the table is encrypted. + resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): + How broadly a resource has been shared. + profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): + The last time the profile was generated. + resource_labels (MutableMapping[str, str]): + The labels applied to the resource at the + time the profile was generated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the table was created. + """ + class State(proto.Enum): + r"""Possible states of a profile. New items may be added. + + Values: + STATE_UNSPECIFIED (0): + Unused. + RUNNING (1): + The profile is currently running. Once a + profile has finished it will transition to DONE. + DONE (2): + The profile is no longer generating. 
If + profile_status.status.code is 0, the profile succeeded, + otherwise, it failed. + """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + DONE = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + project_data_profile: str = proto.Field( + proto.STRING, + number=2, + ) + dataset_project_id: str = proto.Field( + proto.STRING, + number=24, + ) + dataset_location: str = proto.Field( + proto.STRING, + number=29, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=25, + ) + table_id: str = proto.Field( + proto.STRING, + number=26, + ) + full_resource: str = proto.Field( + proto.STRING, + number=3, + ) + profile_status: 'ProfileStatus' = proto.Field( + proto.MESSAGE, + number=21, + message='ProfileStatus', + ) + state: State = proto.Field( + proto.ENUM, + number=22, + enum=State, + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=5, + message=storage.SensitivityScore, + ) + data_risk_level: 'DataRiskLevel' = proto.Field( + proto.MESSAGE, + number=6, + message='DataRiskLevel', + ) + predicted_info_types: MutableSequence['InfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message='InfoTypeSummary', + ) + other_info_types: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=28, + message='OtherInfoTypeSummary', + ) + config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( + proto.MESSAGE, + number=7, + message='DataProfileConfigSnapshot', + ) + last_modified_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + scanned_column_count: int = proto.Field( + proto.INT64, + number=10, + ) + failed_column_count: int = proto.Field( + proto.INT64, + number=11, + ) + table_size_bytes: int = proto.Field( + proto.INT64, + number=12, + ) + row_count: int = proto.Field( + proto.INT64, + 
number=13, + ) + encryption_status: 'EncryptionStatus' = proto.Field( + proto.ENUM, + number=14, + enum='EncryptionStatus', + ) + resource_visibility: 'ResourceVisibility' = proto.Field( + proto.ENUM, + number=15, + enum='ResourceVisibility', + ) + profile_last_generated: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=17, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=23, + message=timestamp_pb2.Timestamp, + ) + + +class ProfileStatus(proto.Message): + r""" + + Attributes: + status (google.rpc.status_pb2.Status): + Profiling status code and optional message + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Time when the profile generation status was + updated + """ + + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class InfoTypeSummary(proto.Message): + r"""The infoType details for this column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The infoType. + estimated_prevalence (int): + Not populated for predicted infotypes. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + + +class OtherInfoTypeSummary(proto.Message): + r"""Infotype details for other infoTypes found within a column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The other infoType. + estimated_prevalence (int): + Approximate percentage of non-null rows that + contained data detected by this infotype. 
+ """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + + +class DataProfilePubSubCondition(proto.Message): + r"""A condition for determining whether a Pub/Sub should be + triggered. + + Attributes: + expressions (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions): + An expression. + """ + class ProfileScoreBucket(proto.Enum): + r"""Various score levels for resources. + + Values: + PROFILE_SCORE_BUCKET_UNSPECIFIED (0): + Unused. + HIGH (1): + High risk/sensitivity detected. + MEDIUM_OR_HIGH (2): + Medium or high risk/sensitivity detected. + """ + PROFILE_SCORE_BUCKET_UNSPECIFIED = 0 + HIGH = 1 + MEDIUM_OR_HIGH = 2 + + class PubSubCondition(proto.Message): + r"""A condition consisting of a value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + minimum_risk_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum data risk score that triggers the + condition. + + This field is a member of `oneof`_ ``value``. + minimum_sensitivity_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum sensitivity level that triggers + the condition. + + This field is a member of `oneof`_ ``value``. 
+ """ + + minimum_risk_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( + proto.ENUM, + number=1, + oneof='value', + enum='DataProfilePubSubCondition.ProfileScoreBucket', + ) + minimum_sensitivity_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( + proto.ENUM, + number=2, + oneof='value', + enum='DataProfilePubSubCondition.ProfileScoreBucket', + ) + + class PubSubExpressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + Attributes: + logical_operator (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator): + The operator to apply to the collection of + conditions. + conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubCondition]): + Conditions to apply to the expression. + """ + class PubSubLogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused. + OR (1): + Conditional OR. + AND (2): + Conditional AND. + """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + OR = 1 + AND = 2 + + logical_operator: 'DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator', + ) + conditions: MutableSequence['DataProfilePubSubCondition.PubSubCondition'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='DataProfilePubSubCondition.PubSubCondition', + ) + + expressions: PubSubExpressions = proto.Field( + proto.MESSAGE, + number=1, + message=PubSubExpressions, + ) + + +class DataProfilePubSubMessage(proto.Message): + r"""Pub/Sub topic message for a + DataProfileAction.PubSubNotification event. To receive a message + of protocol buffer schema type, convert the message data to an + object of this proto class. 
+ + Attributes: + profile (google.cloud.dlp_v2.types.TableDataProfile): + If ``DetailLevel`` is ``TABLE_PROFILE`` this will be fully + populated. Otherwise, if ``DetailLevel`` is + ``RESOURCE_NAME``, then only ``name`` and ``full_resource`` + will be populated. + event (google.cloud.dlp_v2.types.DataProfileAction.EventType): + The event that caused the Pub/Sub message to + be sent. + """ + + profile: 'TableDataProfile' = proto.Field( + proto.MESSAGE, + number=1, + message='TableDataProfile', + ) + event: 'DataProfileAction.EventType' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileAction.EventType', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py new file mode 100644 index 00000000..4522dfd7 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py @@ -0,0 +1,1476 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'Likelihood', + 'FileType', + 'InfoType', + 'SensitivityScore', + 'StoredType', + 'CustomInfoType', + 'FieldId', + 'PartitionId', + 'KindExpression', + 'DatastoreOptions', + 'CloudStorageRegexFileSet', + 'CloudStorageOptions', + 'CloudStorageFileSet', + 'CloudStoragePath', + 'BigQueryOptions', + 'StorageConfig', + 'HybridOptions', + 'BigQueryKey', + 'DatastoreKey', + 'Key', + 'RecordKey', + 'BigQueryTable', + 'BigQueryField', + 'EntityId', + 'TableOptions', + }, +) + + +class Likelihood(proto.Enum): + r"""Categorization of results based on how likely they are to + represent a match, based on the number of elements they contain + which imply a match. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Default value; same as POSSIBLE. + VERY_UNLIKELY (1): + Few matching elements. + UNLIKELY (2): + No description available. + POSSIBLE (3): + Some matching elements. + LIKELY (4): + No description available. + VERY_LIKELY (5): + Many matching elements. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class FileType(proto.Enum): + r"""Definitions of file type groups to scan. New types will be + added to this list. + + Values: + FILE_TYPE_UNSPECIFIED (0): + Includes all files. + BINARY_FILE (1): + Includes all file extensions not covered by another entry. + Binary scanning attempts to convert the content of the file + to utf_8 to scan the file. If you wish to avoid this fall + back, specify one or more of the other FileType's in your + storage scan. 
+ TEXT_FILE (2): + Included file extensions: + asc,asp, aspx, brf, c, cc,cfm, cgi, cpp, csv, + cxx, c++, cs, css, dart, dat, dot, eml,, + epbub, ged, go, h, hh, hpp, hxx, h++, hs, html, + htm, mkd, markdown, m, ml, mli, perl, pl, + plist, pm, php, phtml, pht, properties, py, + pyw, rb, rbw, rs, rss, rc, scala, sh, sql, + swift, tex, shtml, shtm, xhtml, lhs, ics, ini, + java, js, json, kix, kml, ocaml, md, txt, + text, tsv, vb, vcard, vcs, wml, xcodeproj, xml, + xsl, xsd, yml, yaml. + IMAGE (3): + Included file extensions: bmp, gif, jpg, jpeg, jpe, png. + bytes_limit_per_file has no effect on image files. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + WORD (5): + Word files >30 MB will be scanned as binary + files. Included file extensions: + docx, dotx, docm, dotm + PDF (6): + PDF files >30 MB will be scanned as binary + files. Included file extensions: + pdf + AVRO (7): + Included file extensions: + avro + CSV (8): + Included file extensions: + csv + TSV (9): + Included file extensions: + tsv + POWERPOINT (11): + Powerpoint files >30 MB will be scanned as + binary files. Included file extensions: + pptx, pptm, potx, potm, pot + EXCEL (12): + Excel files >30 MB will be scanned as binary + files. Included file extensions: + xlsx, xlsm, xltx, xltm + """ + FILE_TYPE_UNSPECIFIED = 0 + BINARY_FILE = 1 + TEXT_FILE = 2 + IMAGE = 3 + WORD = 5 + PDF = 6 + AVRO = 7 + CSV = 8 + TSV = 9 + POWERPOINT = 11 + EXCEL = 12 + + +class InfoType(proto.Message): + r"""Type of information detected by the API. + + Attributes: + name (str): + Name of the information type. Either a name of your choosing + when creating a CustomInfoType, or one of the names listed + at https://cloud.google.com/dlp/docs/infotypes-reference + when specifying a built-in type. When sending Cloud DLP + results to Data Catalog, infoType names should conform to + the pattern ``[A-Za-z0-9$_-]{1,64}``. + version (str): + Optional version name for this InfoType. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SensitivityScore(proto.Message): + r"""Score is a summary of all elements in the data profile. + A higher number means more sensitive. + + Attributes: + score (google.cloud.dlp_v2.types.SensitivityScore.SensitivityScoreLevel): + The score applied to the resource. + """ + class SensitivityScoreLevel(proto.Enum): + r"""Various score levels for resources. + + Values: + SENSITIVITY_SCORE_UNSPECIFIED (0): + Unused. + SENSITIVITY_LOW (10): + No sensitive information detected. Limited + access. + SENSITIVITY_MODERATE (20): + Medium risk - PII, potentially sensitive + data, or fields with free-text data that are at + higher risk of having intermittent sensitive + data. Consider limiting access. + SENSITIVITY_HIGH (30): + High risk – SPII may be present. Exfiltration + of data may lead to user data loss. + Re-identification of users may be possible. + Consider limiting usage and or removing SPII. + """ + SENSITIVITY_SCORE_UNSPECIFIED = 0 + SENSITIVITY_LOW = 10 + SENSITIVITY_MODERATE = 20 + SENSITIVITY_HIGH = 30 + + score: SensitivityScoreLevel = proto.Field( + proto.ENUM, + number=1, + enum=SensitivityScoreLevel, + ) + + +class StoredType(proto.Message): + r"""A reference to a StoredInfoType to use with scanning. + + Attributes: + name (str): + Resource name of the requested ``StoredInfoType``, for + example + ``organizations/433245324/storedInfoTypes/432452342`` or + ``projects/project-id/storedInfoTypes/432452342``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp indicating when the version of the + ``StoredInfoType`` used for inspection was created. + Output-only field, populated by the system. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class CustomInfoType(proto.Message): + r"""Custom information type provided by the user. Used to find + domain-specific sensitive information configurable to the data + in question. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + CustomInfoType can either be a new infoType, or an extension + of built-in infoType, when the name matches one of existing + infoTypes and that infoType is specified in + ``InspectContent.info_types`` field. Specifying the latter + adds findings to the one detected by the system. If built-in + info type is not specified in ``InspectContent.info_types`` + list then the name is treated as a custom info type. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Likelihood to return for this CustomInfoType. This base + value can be altered by a detection rule if the finding + meets the criteria specified by the rule. Defaults to + ``VERY_LIKELY`` if not specified. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + A list of phrases to detect as a + CustomInfoType. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression based CustomInfoType. + + This field is a member of `oneof`_ ``type``. + surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): + Message for detecting output from + deidentification transformations that support + reversing. + + This field is a member of `oneof`_ ``type``. 
+ stored_type (google.cloud.dlp_v2.types.StoredType): + Load an existing ``StoredInfoType`` resource for use in + ``InspectDataSource``. Not currently supported in + ``InspectContent``. + + This field is a member of `oneof`_ ``type``. + detection_rules (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): + Set of detection rules to apply to all findings of this + CustomInfoType. Rules are applied in order that they are + specified. Not supported for the ``surrogate_type`` + CustomInfoType. + exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): + If set to EXCLUSION_TYPE_EXCLUDE this infoType will not + cause a finding to be returned. It still can be used for + rules matching. + """ + class ExclusionType(proto.Enum): + r""" + + Values: + EXCLUSION_TYPE_UNSPECIFIED (0): + A finding of this custom info type will not + be excluded from results. + EXCLUSION_TYPE_EXCLUDE (1): + A finding of this custom info type will be + excluded from final results, but can still + affect rule execution. + """ + EXCLUSION_TYPE_UNSPECIFIED = 0 + EXCLUSION_TYPE_EXCLUDE = 1 + + class Dictionary(proto.Message): + r"""Custom information type based on a dictionary of words or phrases. + This can be used to match sensitive information specific to the + data, such as a list of employee IDs or job titles. + + Dictionary words are case-insensitive and all characters other than + letters and digits in the unicode `Basic Multilingual + Plane `__ + will be replaced with whitespace when scanning for matches, so the + dictionary phrase "Sam Johnson" will match all three phrases "sam + johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the + characters surrounding any match must be of a different type than + the adjacent characters within the word, so letters must be next to + non-letters and digits next to non-digits. 
For example, the + dictionary word "jen" will match the first three letters of the text + "jen123" but will return no matches for "jennifer". + + Dictionary words containing a large number of characters that are + not letters or digits may result in unexpected findings because such + characters are treated as whitespace. The + `limits `__ page contains + details about the size limits of dictionaries. For dictionaries that + do not fit within these constraints, consider using + ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): + List of words or phrases to search for. + + This field is a member of `oneof`_ ``source``. + cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): + Newline-delimited file of words in Cloud + Storage. Only a single file is accepted. + + This field is a member of `oneof`_ ``source``. + """ + + class WordList(proto.Message): + r"""Message defining a list of words or phrases to search for in + the data. + + Attributes: + words (MutableSequence[str]): + Words or phrases defining the dictionary. The dictionary + must contain at least one phrase and every phrase must + contain at least 2 characters that are letters or digits. 
+ [required] + """ + + words: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + word_list: 'CustomInfoType.Dictionary.WordList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='source', + message='CustomInfoType.Dictionary.WordList', + ) + cloud_storage_path: 'CloudStoragePath' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='CloudStoragePath', + ) + + class Regex(proto.Message): + r"""Message defining a custom regular expression. + + Attributes: + pattern (str): + Pattern defining the regular expression. Its + syntax + (https://github.com/google/re2/wiki/Syntax) can + be found under the google/re2 repository on + GitHub. + group_indexes (MutableSequence[int]): + The index of the submatch to extract as + findings. When not specified, the entire match + is returned. No more than 3 may be included. + """ + + pattern: str = proto.Field( + proto.STRING, + number=1, + ) + group_indexes: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + + class SurrogateType(proto.Message): + r"""Message for detecting output from deidentification transformations + such as + ```CryptoReplaceFfxFpeConfig`` `__. + These types of transformations are those that perform + pseudonymization, thereby producing a "surrogate" as output. This + should be used in conjunction with a field on the transformation + such as ``surrogate_info_type``. This CustomInfoType does not + support the use of ``detection_rules``. + + """ + + class DetectionRule(proto.Message): + r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a + ``CustomInfoType`` to alter behavior under certain circumstances, + depending on the specific details of the rule. Not supported for the + ``surrogate_type`` custom infoType. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + + This field is a member of `oneof`_ ``type``. + """ + + class Proximity(proto.Message): + r"""Message for specifying a window around a finding to apply a + detection rule. + + Attributes: + window_before (int): + Number of characters before the finding to consider. For + tabular data, if you want to modify the likelihood of an + entire column of findngs, set this to 1. For more + information, see [Hotword example: Set the match likelihood + of a table column] + (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). + window_after (int): + Number of characters after the finding to + consider. + """ + + window_before: int = proto.Field( + proto.INT32, + number=1, + ) + window_after: int = proto.Field( + proto.INT32, + number=2, + ) + + class LikelihoodAdjustment(proto.Message): + r"""Message for specifying an adjustment to the likelihood of a + finding as part of a detection rule. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed_likelihood (google.cloud.dlp_v2.types.Likelihood): + Set the likelihood of a finding to a fixed + value. + + This field is a member of `oneof`_ ``adjustment``. + relative_likelihood (int): + Increase or decrease the likelihood by the specified number + of levels. For example, if a finding would be ``POSSIBLE`` + without the detection rule and ``relative_likelihood`` is 1, + then it is upgraded to ``LIKELY``, while a value of -1 would + downgrade it to ``UNLIKELY``. 
Likelihood may never drop + below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so + applying an adjustment of 1 followed by an adjustment of -1 + when base likelihood is ``VERY_LIKELY`` will result in a + final likelihood of ``LIKELY``. + + This field is a member of `oneof`_ ``adjustment``. + """ + + fixed_likelihood: 'Likelihood' = proto.Field( + proto.ENUM, + number=1, + oneof='adjustment', + enum='Likelihood', + ) + relative_likelihood: int = proto.Field( + proto.INT32, + number=2, + oneof='adjustment', + ) + + class HotwordRule(proto.Message): + r"""The rule that adjusts the likelihood of findings within a + certain proximity of hotwords. + + Attributes: + hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): + Range of characters within which the entire hotword must + reside. The total length of the window cannot exceed 1000 + characters. The finding itself will be included in the + window, so that hotwords can be used to match substrings of + the finding itself. Suppose you want Cloud DLP to promote + the likelihood of the phone number regex "(\d{3}) + \\d{3}-\d{4}" if the area code is known to be the area code + of a company's office. In this case, use the hotword regex + "(xxx)", where "xxx" is the area code in question. + + For tabular data, if you want to modify the likelihood of an + entire column of findngs, see [Hotword example: Set the + match likelihood of a table column] + (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). + likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment): + Likelihood adjustment to apply to all + matching findings. 
+ """ + + hotword_regex: 'CustomInfoType.Regex' = proto.Field( + proto.MESSAGE, + number=1, + message='CustomInfoType.Regex', + ) + proximity: 'CustomInfoType.DetectionRule.Proximity' = proto.Field( + proto.MESSAGE, + number=2, + message='CustomInfoType.DetectionRule.Proximity', + ) + likelihood_adjustment: 'CustomInfoType.DetectionRule.LikelihoodAdjustment' = proto.Field( + proto.MESSAGE, + number=3, + message='CustomInfoType.DetectionRule.LikelihoodAdjustment', + ) + + hotword_rule: 'CustomInfoType.DetectionRule.HotwordRule' = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='CustomInfoType.DetectionRule.HotwordRule', + ) + + info_type: 'InfoType' = proto.Field( + proto.MESSAGE, + number=1, + message='InfoType', + ) + likelihood: 'Likelihood' = proto.Field( + proto.ENUM, + number=6, + enum='Likelihood', + ) + dictionary: Dictionary = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=Dictionary, + ) + regex: Regex = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message=Regex, + ) + surrogate_type: SurrogateType = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=SurrogateType, + ) + stored_type: 'StoredType' = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message='StoredType', + ) + detection_rules: MutableSequence[DetectionRule] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=DetectionRule, + ) + exclusion_type: ExclusionType = proto.Field( + proto.ENUM, + number=8, + enum=ExclusionType, + ) + + +class FieldId(proto.Message): + r"""General identifier of a data field in a storage service. + + Attributes: + name (str): + Name describing the field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PartitionId(proto.Message): + r"""Datastore partition ID. + A partition ID identifies a grouping of entities. The grouping + is always by project and namespace, however the namespace ID may + be empty. 
+ A partition ID contains several dimensions: + project ID and namespace ID. + + Attributes: + project_id (str): + The ID of the project to which the entities + belong. + namespace_id (str): + If not empty, the ID of the namespace to + which the entities belong. + """ + + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + namespace_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class KindExpression(proto.Message): + r"""A representation of a Datastore kind. + + Attributes: + name (str): + The name of the kind. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DatastoreOptions(proto.Message): + r"""Options defining a data set within Google Cloud Datastore. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + A partition ID identifies a grouping of + entities. The grouping is always by project and + namespace, however the namespace ID may be + empty. + kind (google.cloud.dlp_v2.types.KindExpression): + The kind to process. + """ + + partition_id: 'PartitionId' = proto.Field( + proto.MESSAGE, + number=1, + message='PartitionId', + ) + kind: 'KindExpression' = proto.Field( + proto.MESSAGE, + number=2, + message='KindExpression', + ) + + +class CloudStorageRegexFileSet(proto.Message): + r"""Message representing a set of files in a Cloud Storage bucket. + Regular expressions are used to allow fine-grained control over + which files in the bucket to include. + + Included files are those that match at least one item in + ``include_regex`` and do not match any items in ``exclude_regex``. + Note that a file that matches items from both lists will *not* be + included. For a match to occur, the entire file path (i.e., + everything in the url after the bucket name) must match the regular + expression. 
+ + For example, given the input + ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: + + - ``gs://mybucket/directory1/myfile`` will be included + - ``gs://mybucket/directory1/directory2/myfile`` will be included + (``.*`` matches across ``/``) + - ``gs://mybucket/directory0/directory1/myfile`` will *not* be + included (the full path doesn't match any items in + ``include_regex``) + - ``gs://mybucket/directory1/excludedfile`` will *not* be included + (the path matches an item in ``exclude_regex``) + + If ``include_regex`` is left empty, it will match all files by + default (this is equivalent to setting ``include_regex: [".*"]``). + + Some other common use cases: + + - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will + include all files in ``mybucket`` except for .pdf files + - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` + will include all files directly under + ``gs://mybucket/directory/``, without matching across ``/`` + + Attributes: + bucket_name (str): + The name of a Cloud Storage bucket. Required. + include_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + include. All files in the bucket that match at least one of + these regular expressions will be included in the set of + files, except for those that also match an item in + ``exclude_regex``. Leaving this field empty will match all + files by default (this is equivalent to including ``.*`` in + the list). + + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. + exclude_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + exclude. All files in the bucket that match at least one of + these regular expressions will be excluded from the scan. + + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. 
+ """ + + bucket_name: str = proto.Field( + proto.STRING, + number=1, + ) + include_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclude_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CloudStorageOptions(proto.Message): + r"""Options defining a file or a set of files within a Cloud + Storage bucket. + + Attributes: + file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): + The set of one or more files to scan. + bytes_limit_per_file (int): + Max number of bytes to scan from a file. If a scanned file's + size is bigger than this value then the rest of the bytes + are omitted. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. Cannot be set + if de-identification is requested. + bytes_limit_per_file_percent (int): + Max percentage of bytes to scan from a file. The rest are + omitted. The number of bytes scanned is rounded down. Must + be between 0 and 100, inclusively. Both 0 and 100 means no + limit. Defaults to 0. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. Cannot be set + if de-identification is requested. + file_types (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of file type groups to include in the scan. If empty, + all files are scanned and available data format processors + are applied. In addition, the binary content of the selected + files is always scanned as well. Images are scanned only as + binary if the specified region does not support image + inspection and no file_types were specified. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): + + files_limit_percent (int): + Limits the number of files to scan to this + percentage of the input FileSet. Number of files + scanned is rounded down. Must be between 0 and + 100, inclusively. Both 0 and 100 means no limit. 
+ Defaults to 0. + """ + class SampleMethod(proto.Enum): + r"""How to sample bytes if not all bytes are scanned. Meaningful only + when used in conjunction with bytes_limit_per_file. If not + specified, scanning would start from the top. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No description available. + TOP (1): + Scan from the top (default). + RANDOM_START (2): + For each file larger than bytes_limit_per_file, randomly + pick the offset to start scanning. The scanned bytes are + contiguous. + """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + class FileSet(proto.Message): + r"""Set of files to scan. + + Attributes: + url (str): + The Cloud Storage url of the file(s) to scan, in the format + ``gs:///``. Trailing wildcard in the path is + allowed. + + If the url ends in a trailing slash, the bucket or directory + represented by the url will be scanned non-recursively + (content in sub-directories will not be scanned). This means + that ``gs://mybucket/`` is equivalent to + ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is + equivalent to ``gs://mybucket/directory/*``. + + Exactly one of ``url`` or ``regex_file_set`` must be set. + regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): + The regex-filtered set of files to scan. Exactly one of + ``url`` or ``regex_file_set`` must be set. 
+ """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + regex_file_set: 'CloudStorageRegexFileSet' = proto.Field( + proto.MESSAGE, + number=2, + message='CloudStorageRegexFileSet', + ) + + file_set: FileSet = proto.Field( + proto.MESSAGE, + number=1, + message=FileSet, + ) + bytes_limit_per_file: int = proto.Field( + proto.INT64, + number=4, + ) + bytes_limit_per_file_percent: int = proto.Field( + proto.INT32, + number=8, + ) + file_types: MutableSequence['FileType'] = proto.RepeatedField( + proto.ENUM, + number=5, + enum='FileType', + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=6, + enum=SampleMethod, + ) + files_limit_percent: int = proto.Field( + proto.INT32, + number=7, + ) + + +class CloudStorageFileSet(proto.Message): + r"""Message representing a set of files in Cloud Storage. + + Attributes: + url (str): + The url, in the format ``gs:///``. Trailing + wildcard in the path is allowed. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CloudStoragePath(proto.Message): + r"""Message representing a single file or path in Cloud Storage. + + Attributes: + path (str): + A url representing a file or path (no wildcards) in Cloud + Storage. Example: gs://[BUCKET_NAME]/dictionary.txt + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BigQueryOptions(proto.Message): + r"""Options defining BigQuery table and row identifiers. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Table fields that may uniquely identify a row within the + table. When ``actions.saveFindings.outputConfig.table`` is + specified, the values of columns specified here are + available in the output table under + ``location.content_locations.record_location.record_key.id_values``. + Nested fields such as ``person.birthdate.year`` are allowed. 
+ rows_limit (int): + Max number of rows to scan. If the table has more rows than + this value, the rest of the rows are omitted. If not set, or + if set to 0, all rows will be scanned. Only one of + rows_limit and rows_limit_percent can be specified. Cannot + be used in conjunction with TimespanConfig. + rows_limit_percent (int): + Max percentage of rows to scan. The rest are omitted. The + number of rows scanned is rounded down. Must be between 0 + and 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. Only one of rows_limit and rows_limit_percent + can be specified. Cannot be used in conjunction with + TimespanConfig. + sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): + + excluded_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + References to fields excluded from scanning. + This allows you to skip inspection of entire + columns which you know have no findings. + included_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Limit scanning only to these fields. + """ + class SampleMethod(proto.Enum): + r"""How to sample rows if not all rows are scanned. Meaningful only when + used in conjunction with either rows_limit or rows_limit_percent. If + not specified, rows are scanned in the order BigQuery reads them. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No description available. + TOP (1): + Scan groups of rows in the order BigQuery + provides (default). Multiple groups of rows may + be scanned in parallel, so results may not + appear in the same order the rows are read. + RANDOM_START (2): + Randomly pick groups of rows to scan. 
+ """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + table_reference: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='FieldId', + ) + rows_limit: int = proto.Field( + proto.INT64, + number=3, + ) + rows_limit_percent: int = proto.Field( + proto.INT32, + number=6, + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=4, + enum=SampleMethod, + ) + excluded_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='FieldId', + ) + included_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='FieldId', + ) + + +class StorageConfig(proto.Message): + r"""Shared message indicating Cloud storage type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): + Google Cloud Datastore options. + + This field is a member of `oneof`_ ``type``. + cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): + Cloud Storage options. + + This field is a member of `oneof`_ ``type``. + big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): + BigQuery options. + + This field is a member of `oneof`_ ``type``. + hybrid_options (google.cloud.dlp_v2.types.HybridOptions): + Hybrid inspection options. + + This field is a member of `oneof`_ ``type``. + timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): + + """ + + class TimespanConfig(proto.Message): + r"""Configuration of the timespan of the items to include in + scanning. 
Currently only supported when inspecting Cloud Storage + and BigQuery. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows older than + this value. If not set, no lower time limit is + applied. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows newer than + this value. If not set, no upper time limit is + applied. + timestamp_field (google.cloud.dlp_v2.types.FieldId): + Specification of the field containing the timestamp of + scanned items. Used for data sources like Datastore and + BigQuery. + + For BigQuery + + If this value is not specified and the table was modified + between the given start and end times, the entire table will + be scanned. If this value is specified, then rows are + filtered based on the given start and end times. Rows with a + ``NULL`` value in the provided BigQuery column are skipped. + Valid data types of the provided BigQuery column are: + ``INTEGER``, ``DATE``, ``TIMESTAMP``, and ``DATETIME``. + + If your BigQuery table is `partitioned at ingestion + time `__, + you can use any of the following pseudo-columns as your + timestamp field. When used with Cloud DLP, these + pseudo-column names are case sensitive. + + .. raw:: html + +
    +
  • _PARTITIONTIME
  • +
  • _PARTITIONDATE
  • +
  • _PARTITION_LOAD_TIME
  • +
+ + For Datastore + + If this value is specified, then entities are filtered based + on the given start and end times. If an entity does not + contain the provided timestamp property or contains empty or + invalid values, then it is included. Valid data types of the + provided timestamp property are: ``TIMESTAMP``. + + See the `known + issue `__ + related to this operation. + enable_auto_population_of_timespan_config (bool): + When the job is started by a JobTrigger we will + automatically figure out a valid start_time to avoid + scanning files that have not been modified since the last + time the JobTrigger executed. This will be based on the time + of the execution of the last run of the JobTrigger or the + timespan end_time used in the last run of the JobTrigger. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + timestamp_field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=3, + message='FieldId', + ) + enable_auto_population_of_timespan_config: bool = proto.Field( + proto.BOOL, + number=4, + ) + + datastore_options: 'DatastoreOptions' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='DatastoreOptions', + ) + cloud_storage_options: 'CloudStorageOptions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='CloudStorageOptions', + ) + big_query_options: 'BigQueryOptions' = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message='BigQueryOptions', + ) + hybrid_options: 'HybridOptions' = proto.Field( + proto.MESSAGE, + number=9, + oneof='type', + message='HybridOptions', + ) + timespan_config: TimespanConfig = proto.Field( + proto.MESSAGE, + number=6, + message=TimespanConfig, + ) + + +class HybridOptions(proto.Message): + r"""Configuration to control jobs where the content being + inspected is outside of Google 
Cloud Platform. + + Attributes: + description (str): + A short description of where the data is + coming from. Will be stored once in the job. 256 + max length. + required_finding_label_keys (MutableSequence[str]): + These are labels that each inspection request must include + within their 'finding_labels' map. Request may contain + others, but any missing one of these will be rejected. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + No more than 10 keys can be required. + labels (MutableMapping[str, str]): + To organize findings, these labels will be added to each + finding. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional + information to make findings meaningful such as + the columns that are primary keys. + """ + + description: str = proto.Field( + proto.STRING, + number=1, + ) + required_finding_label_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + table_options: 'TableOptions' = proto.Field( + proto.MESSAGE, + number=4, + message='TableOptions', + ) + + +class BigQueryKey(proto.Message): + r"""Row key for identifying a record in BigQuery table. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + row_number (int): + Row number inferred at the time the table was scanned. 
This + value is nondeterministic, cannot be queried, and may be + null for inspection jobs. To locate findings within a table, + specify + ``inspect_job.storage_config.big_query_options.identifying_fields`` + in ``CreateDlpJobRequest``. + """ + + table_reference: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + row_number: int = proto.Field( + proto.INT64, + number=2, + ) + + +class DatastoreKey(proto.Message): + r"""Record key for a finding in Cloud Datastore. + + Attributes: + entity_key (google.cloud.dlp_v2.types.Key): + Datastore entity key. + """ + + entity_key: 'Key' = proto.Field( + proto.MESSAGE, + number=1, + message='Key', + ) + + +class Key(proto.Message): + r"""A unique identifier for a Datastore entity. + If a key's partition ID or any of its path kinds or names are + reserved/read-only, the key is reserved/read-only. + A reserved/read-only key is forbidden in certain documented + contexts. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + Entities are partitioned into subsets, + currently identified by a project ID and + namespace ID. Queries are scoped to a single + partition. + path (MutableSequence[google.cloud.dlp_v2.types.Key.PathElement]): + The entity path. An entity path consists of one or more + elements composed of a kind and a string or numerical + identifier, which identify entities. The first element + identifies a *root entity*, the second element identifies a + *child* of the root entity, the third element identifies a + child of the second entity, and so forth. The entities + identified by all prefixes of the path are called the + element's *ancestors*. + + A path can never be empty, and a path can have at most 100 + elements. + """ + + class PathElement(proto.Message): + r"""A (kind, ID/name) pair used to construct a key path. + If either name or ID is set, the element is complete. If neither + is set, the element is incomplete. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kind (str): + The kind of the entity. A kind matching regex ``__.*__`` is + reserved/read-only. A kind must not contain more than 1500 + bytes when UTF-8 encoded. Cannot be ``""``. + id (int): + The auto-allocated ID of the entity. + Never equal to zero. Values less than zero are + discouraged and may not be supported in the + future. + + This field is a member of `oneof`_ ``id_type``. + name (str): + The name of the entity. A name matching regex ``__.*__`` is + reserved/read-only. A name must not be more than 1500 bytes + when UTF-8 encoded. Cannot be ``""``. + + This field is a member of `oneof`_ ``id_type``. + """ + + kind: str = proto.Field( + proto.STRING, + number=1, + ) + id: int = proto.Field( + proto.INT64, + number=2, + oneof='id_type', + ) + name: str = proto.Field( + proto.STRING, + number=3, + oneof='id_type', + ) + + partition_id: 'PartitionId' = proto.Field( + proto.MESSAGE, + number=1, + message='PartitionId', + ) + path: MutableSequence[PathElement] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=PathElement, + ) + + +class RecordKey(proto.Message): + r"""Message for a unique key indicating a record that contains a + finding. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_key (google.cloud.dlp_v2.types.DatastoreKey): + + This field is a member of `oneof`_ ``type``. 
+ big_query_key (google.cloud.dlp_v2.types.BigQueryKey): + + This field is a member of `oneof`_ ``type``. + id_values (MutableSequence[str]): + Values of identifying columns in the given row. Order of + values matches the order of ``identifying_fields`` specified + in the scanning request. + """ + + datastore_key: 'DatastoreKey' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='DatastoreKey', + ) + big_query_key: 'BigQueryKey' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='BigQueryKey', + ) + id_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class BigQueryTable(proto.Message): + r"""Message defining the location of a BigQuery table. A table is + uniquely identified by its project_id, dataset_id, and table_name. + Within a query a table is often referenced with a string in the + format of: ``:.`` or + ``..``. + + Attributes: + project_id (str): + The Google Cloud Platform project ID of the + project containing the table. If omitted, + project ID is inferred from the API call. + dataset_id (str): + Dataset ID of the table. + table_id (str): + Name of the table. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=2, + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BigQueryField(proto.Message): + r"""Message defining a field of a BigQuery table. + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Source table of the field. + field (google.cloud.dlp_v2.types.FieldId): + Designated field in the BigQuery table. + """ + + table: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=2, + message='FieldId', + ) + + +class EntityId(proto.Message): + r"""An entity in a dataset is a field or set of fields that correspond + to a single person. 
For example, in medical records the ``EntityId``
+    might be a patient identifier, or for financial records it might be
+    an account identifier. This message is used when generalizations or
+    analysis must take into account that multiple rows correspond to the
+    same entity.
+
+    Attributes:
+        field (google.cloud.dlp_v2.types.FieldId):
+            Composite key indicating which field contains
+            the entity identifier.
+    """
+
+    field: 'FieldId' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='FieldId',
+    )
+
+
+class TableOptions(proto.Message):
+    r"""Instructions regarding the table content being inspected.
+
+    Attributes:
+        identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]):
+            The columns that are the primary keys for
+            table objects included in ContentItem. A copy of
+            this cell's value will be stored alongside
+            each finding so that the finding can
+            be traced to the specific row it came from. No
+            more than 3 may be provided.
+    """
+
+    identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='FieldId',
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini
new file mode 100644
index 00000000..574c5aed
--- /dev/null
+++ b/owl-bot-staging/v2/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py
new file mode 100644
index 00000000..6b1462df
--- /dev/null
+++ b/owl-bot-staging/v2/noxfile.py
@@ -0,0 +1,184 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+ALL_PYTHON = [
+    "3.7",
+    "3.8",
+    "3.9",
+    "3.10",
+    "3.11",
+]
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")
+
+BLACK_VERSION = "black==22.3.0"
+BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
+DEFAULT_PYTHON_VERSION = "3.11"
+
+nox.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+    "blacken",
+    "lint",
+    "lint_setup_py",
+]
+
+@nox.session(python=ALL_PYTHON)
+def unit(session):
+    """Run the unit test suite."""
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
+    session.install('-e', '.')
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/dlp_v2/',
+        '--cov=tests/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs))
+    )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py new file mode 100644 index 00000000..e4371abf --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ActivateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ActivateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.activate_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ActivateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py new file mode 100644 index 00000000..c0b4fac1 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ActivateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ActivateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.activate_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ActivateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py new file mode 100644 index 00000000..d8190299 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CancelDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_CancelDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py new file mode 100644 index 00000000..7475d6fa --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CancelDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_CancelDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py new file mode 100644 index 00000000..81ad2519 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py new file mode 100644 index 00000000..b394f634 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py new file mode 100644 index 00000000..28770717 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py new file mode 100644 index 00000000..779754f6 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py new file mode 100644 index 00000000..aeb40676 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py new file mode 100644 index 00000000..0e344b36 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py new file mode 100644 index 00000000..3e82b8f3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = await client.create_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py new file mode 100644 index 00000000..ebb74284 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = client.create_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py new file mode 100644 index 00000000..cae6db89 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py new file mode 100644 index 00000000..d59a301d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py new file mode 100644 index 00000000..4903b032 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeidentifyContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = await client.deidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_DeidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py new file mode 100644 index 00000000..2422616c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeidentifyContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = client.deidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_DeidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py new file mode 100644 index 00000000..f544f12d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_deidentify_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py new file mode 100644 index 00000000..a33f3b26 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_deidentify_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py new file mode 100644 index 00000000..8737125b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py new file mode 100644 index 00000000..bb0ce9df --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + client.delete_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py new file mode 100644 index 00000000..f0aec8eb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_inspect_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py new file mode 100644 index 00000000..c908d867 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_inspect_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py new file mode 100644 index 00000000..3784ee3e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_trigger(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py new file mode 100644 index 00000000..9f4405da --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + client.delete_job_trigger(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py new file mode 100644 index 00000000..652d88ff --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_stored_info_type(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py new file mode 100644 index 00000000..7e37ce36 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_stored_info_type(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py new file mode 100644 index 00000000..869504da --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinishDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_FinishDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.finish_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_FinishDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py new file mode 100644 index 00000000..1b694f90 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinishDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_FinishDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + client.finish_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_FinishDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py new file mode 100644 index 00000000..fc1570d3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py new file mode 100644 index 00000000..bb1e1986 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py new file mode 100644 index 00000000..2065aa85 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py new file mode 100644 index 00000000..13959bde --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for GetDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py new file mode 100644 index 00000000..1a9c9649 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py new file mode 100644 index 00000000..112e3d83 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py new file mode 100644 index 00000000..248184c7 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py new file mode 100644 index 00000000..9c6cdb3a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py new file mode 100644 index 00000000..a7820fe2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py new file mode 100644 index 00000000..d0b0a44c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py new file mode 100644 index 00000000..e9f9be5a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py new file mode 100644 index 00000000..2bfd7fe1 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py new file mode 100644 index 00000000..dbdd91c2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py new file mode 100644 index 00000000..a9c4c85e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py new file mode 100644 index 00000000..3f24588b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InspectContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_InspectContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = await client.inspect_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_InspectContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py new file mode 100644 index 00000000..4b5a10f3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for InspectContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_InspectContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = client.inspect_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_InspectContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py new file mode 100644 index 00000000..d1a40dc0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeidentifyTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py new file mode 100644 index 00000000..6a01f0fb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeidentifyTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py new file mode 100644 index 00000000..57c790d8 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDlpJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDlpJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDlpJobs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py new file mode 100644 index 00000000..7d06c237 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDlpJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDlpJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDlpJobs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py new file mode 100644 index 00000000..16b871f8 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInfoTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = await client.list_info_types(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ListInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py new file mode 100644 index 00000000..9e3ca167 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for ListInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInfoTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = client.list_info_types(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ListInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py new file mode 100644 index 00000000..6e405a4f --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInspectTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInspectTemplates_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListInspectTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py new file mode 100644 index 00000000..71673677 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInspectTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInspectTemplates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListInspectTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py new file mode 100644 index 00000000..e8c0281f --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListJobTriggers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListJobTriggers_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py new file mode 100644 index 00000000..0f9141c0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListJobTriggers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListJobTriggers_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py new file mode 100644 index 00000000..460c99c4 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListStoredInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py new file mode 100644 index 00000000..1ad1796e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListStoredInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py new file mode 100644 index 00000000..a7a0d502 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RedactImage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_RedactImage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = await client.redact_image(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_RedactImage_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py new file mode 100644 index 00000000..272bdb80 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RedactImage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_RedactImage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = client.redact_image(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_RedactImage_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py new file mode 100644 index 00000000..401f62df --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ReidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ReidentifyContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.reidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ReidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py new file mode 100644 index 00000000..9e654be9 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ReidentifyContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = client.reidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ReidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py new file mode 100644 index 00000000..8b32186c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py new file mode 100644 index 00000000..e3296531 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py new file mode 100644 index 00000000..8e062116 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py new file mode 100644 index 00000000..332c5de6 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py new file mode 100644 index 00000000..58baaeeb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.update_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py new file mode 100644 index 00000000..3694b5ff --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.update_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py new file mode 100644 index 00000000..d5658d32 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.update_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py new file mode 100644 index 00000000..9471180b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.update_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json new file mode 100644 index 00000000..956f9eab --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -0,0 +1,5503 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.privacy.dlp.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-dlp", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.activate_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ActivateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "activate_job_trigger" + }, + "description": "Sample for ActivateJobTrigger", + "file": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.activate_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ActivateJobTrigger" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "activate_job_trigger" + }, + "description": "Sample for ActivateJobTrigger", + "file": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.cancel_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CancelDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_dlp_job" + }, + "description": "Sample for CancelDlpJob", + "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.cancel_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CancelDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_dlp_job" + }, + "description": "Sample for CancelDlpJob", + "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py" + 
}, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "create_deidentify_template" + }, + "description": "Sample for CreateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.create_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "create_deidentify_template" + }, + "description": "Sample for CreateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_job", + "type": "google.cloud.dlp_v2.types.InspectJobConfig" + }, + { + "name": "risk_job", + "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "create_dlp_job" + }, + "description": "Sample for CreateDlpJob", + "file": "dlp_v2_generated_dlp_service_create_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" + }, + { + "name": "parent", + "type": "str" + }, 
+ { + "name": "inspect_job", + "type": "google.cloud.dlp_v2.types.InspectJobConfig" + }, + { + "name": "risk_job", + "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "create_dlp_job" + }, + "description": "Sample for CreateDlpJob", + "file": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "create_inspect_template" + }, + "description": "Sample for CreateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_create_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "create_inspect_template" + }, + "description": "Sample for 
CreateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "create_job_trigger" + }, + "description": "Sample for CreateJobTrigger", + "file": "dlp_v2_generated_dlp_service_create_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_async", + "segments": [ + { + "end": 55, + "start": 27, + 
"type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "create_job_trigger" + }, + "description": "Sample for CreateJobTrigger", + "file": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 
53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "create_stored_info_type" + }, + "description": "Sample for CreateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", 
+ "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "create_stored_info_type" + }, + "description": "Sample for CreateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.deidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", + "shortName": "deidentify_content" + }, + "description": "Sample for DeidentifyContent", + "file": "dlp_v2_generated_dlp_service_deidentify_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_deidentify_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.deidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", + "shortName": "deidentify_content" + }, + "description": "Sample for DeidentifyContent", + "file": "dlp_v2_generated_dlp_service_deidentify_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_deidentify_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_deidentify_template" + }, + "description": "Sample for DeleteDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async", + 
"segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_deidentify_template" + }, + "description": "Sample for DeleteDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dlp_job" + }, + "description": "Sample for DeleteDlpJob", + "file": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": 
"DeleteDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dlp_job" + }, + "description": "Sample for DeleteDlpJob", + "file": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_inspect_template" + }, + "description": "Sample for 
DeleteInspectTemplate", + "file": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_inspect_template" + }, + "description": "Sample for DeleteInspectTemplate", + "file": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_trigger" + }, + "description": "Sample for DeleteJobTrigger", + "file": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.delete_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_trigger" + }, + "description": "Sample for DeleteJobTrigger", + "file": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_stored_info_type" + }, + "description": "Sample for DeleteStoredInfoType", + "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_stored_info_type" + }, + "description": "Sample for DeleteStoredInfoType", + "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dlp_v2_generated_DlpService_DeleteStoredInfoType_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.finish_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "FinishDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "finish_dlp_job" + }, + "description": "Sample for FinishDlpJob", + "file": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.finish_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "FinishDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "finish_dlp_job" + }, + "description": "Sample for FinishDlpJob", + "file": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "get_deidentify_template" + }, + "description": "Sample for GetDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "get_deidentify_template" + }, + "description": "Sample for GetDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "get_dlp_job" + }, + "description": "Sample for GetDlpJob", + "file": "dlp_v2_generated_dlp_service_get_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + 
{ + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "get_dlp_job" + }, + "description": "Sample for GetDlpJob", + "file": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "get_inspect_template" + }, + "description": "Sample for GetInspectTemplate", + "file": "dlp_v2_generated_dlp_service_get_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + 
}, + "shortName": "GetInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "get_inspect_template" + }, + "description": "Sample for GetInspectTemplate", + "file": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "get_job_trigger" + }, + "description": "Sample for GetJobTrigger", + "file": "dlp_v2_generated_dlp_service_get_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "get_job_trigger" + }, + "description": "Sample for GetJobTrigger", + "file": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "get_stored_info_type" + }, + "description": "Sample for GetStoredInfoType", + "file": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "get_stored_info_type" + }, + "description": "Sample for GetStoredInfoType", + "file": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_dlp_job", + "method": { + "fullName": 
"google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_dlp_job" + }, + "description": "Sample for HybridInspectDlpJob", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_dlp_job" + }, + "description": "Sample for HybridInspectDlpJob", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_job_trigger" + }, + 
"description": "Sample for HybridInspectJobTrigger", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_job_trigger" + }, + "description": "Sample for HybridInspectJobTrigger", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.inspect_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "InspectContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.InspectContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", + "shortName": "inspect_content" + }, + "description": "Sample for InspectContent", + "file": "dlp_v2_generated_dlp_service_inspect_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_InspectContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dlp_v2_generated_dlp_service_inspect_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.inspect_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "InspectContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.InspectContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", + "shortName": "inspect_content" + }, + "description": "Sample for InspectContent", + "file": "dlp_v2_generated_dlp_service_inspect_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_InspectContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_inspect_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_deidentify_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDeidentifyTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager", + "shortName": "list_deidentify_templates" + }, + "description": "Sample for ListDeidentifyTemplates", + "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_deidentify_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDeidentifyTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager", + "shortName": "list_deidentify_templates" + }, + "description": "Sample for ListDeidentifyTemplates", + "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_dlp_jobs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDlpJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager", + "shortName": "list_dlp_jobs" + }, + 
"description": "Sample for ListDlpJobs", + "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_dlp_jobs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDlpJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager", + "shortName": "list_dlp_jobs" + }, + "description": "Sample for ListDlpJobs", + "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", + "shortName": "list_info_types" + }, + "description": "Sample for ListInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_info_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_info_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", + "shortName": "list_info_types" + }, + "description": "Sample for ListInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_info_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_info_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_inspect_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": 
"ListInspectTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager", + "shortName": "list_inspect_templates" + }, + "description": "Sample for ListInspectTemplates", + "file": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_inspect_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInspectTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager", + "shortName": "list_inspect_templates" + }, + "description": "Sample for ListInspectTemplates", + "file": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_job_triggers", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListJobTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager", + "shortName": "list_job_triggers" + }, + "description": "Sample for ListJobTriggers", + "file": "dlp_v2_generated_dlp_service_list_job_triggers_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_job_triggers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_job_triggers", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListJobTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager", + "shortName": "list_job_triggers" + }, + "description": "Sample for ListJobTriggers", + "file": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_stored_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListStoredInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager", + "shortName": "list_stored_info_types" + }, + "description": "Sample for ListStoredInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_stored_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListStoredInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager", + "shortName": "list_stored_info_types" + }, + "description": "Sample for ListStoredInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.redact_image", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "RedactImage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.RedactImageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", + "shortName": "redact_image" + }, + "description": "Sample for RedactImage", + "file": "dlp_v2_generated_dlp_service_redact_image_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_RedactImage_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_redact_image_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.redact_image", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "RedactImage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.RedactImageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", + "shortName": 
"redact_image" + }, + "description": "Sample for RedactImage", + "file": "dlp_v2_generated_dlp_service_redact_image_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_RedactImage_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_redact_image_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.reidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ReidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", + "shortName": "reidentify_content" + }, + "description": "Sample for ReidentifyContent", + "file": "dlp_v2_generated_dlp_service_reidentify_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_reidentify_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.reidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ReidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", + "shortName": "reidentify_content" + }, + "description": "Sample for ReidentifyContent", + "file": "dlp_v2_generated_dlp_service_reidentify_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_reidentify_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "update_deidentify_template" + }, + "description": "Sample for UpdateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.update_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "update_deidentify_template" + }, + "description": "Sample for UpdateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_inspect_template", + "method": { + "fullName": 
"google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "update_inspect_template" + }, + "description": "Sample for UpdateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_update_inspect_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + 
"shortName": "UpdateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "update_inspect_template" + }, + "description": "Sample for UpdateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" + }, 
+ { + "name": "name", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "update_job_trigger" + }, + "description": "Sample for UpdateJobTrigger", + "file": "dlp_v2_generated_dlp_service_update_job_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "update_job_trigger" + }, + "description": "Sample for UpdateJobTrigger", + "file": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "update_stored_info_type" + }, + "description": "Sample for UpdateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "update_stored_info_type" + }, + "description": "Sample for UpdateStoredInfoType", + "file": 
"dlp_v2_generated_dlp_service_update_stored_info_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py" + } + ] +} diff --git a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py new file mode 100644 index 00000000..9adcd0d5 --- /dev/null +++ b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py @@ -0,0 +1,209 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dlpCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'activate_job_trigger': ('name', ), + 'cancel_dlp_job': ('name', ), + 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), + 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), + 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), + 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), + 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), + 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), + 'delete_deidentify_template': ('name', ), + 'delete_dlp_job': ('name', ), + 'delete_inspect_template': ('name', ), + 'delete_job_trigger': ('name', ), + 'delete_stored_info_type': ('name', ), + 'finish_dlp_job': ('name', ), + 'get_deidentify_template': ('name', ), + 'get_dlp_job': ('name', ), + 'get_inspect_template': ('name', ), + 'get_job_trigger': ('name', ), + 'get_stored_info_type': ('name', ), + 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), + 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), + 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), + 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 
'location_id', ), + 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), + 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), + 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'type_', 'location_id', ), + 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), + 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), + 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), + 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), + 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), + 'update_stored_info_type': ('name', 'config', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dlpCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dlp client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py new file mode 100644 index 00000000..2b4eb21b --- /dev/null +++ b/owl-bot-staging/v2/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-dlp' + + +description = "Google Cloud Dlp API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/dlp/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-dlp" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: 
Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v2/testing/constraints-3.10.txt b/owl-bot-staging/v2/testing/constraints-3.10.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.11.txt b/owl-bot-staging/v2/testing/constraints-3.11.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.12.txt b/owl-bot-staging/v2/testing/constraints-3.12.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt new file mode 100644 index 00000000..6c44adfe --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py new file mode 100644 index 00000000..64618efd --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -0,0 +1,17404 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient +from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.services.dlp_service import transports +from google.cloud.dlp_v2.types import dlp +from google.cloud.dlp_v2.types import storage +from google.cloud.location import locations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import 
duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DlpServiceClient._get_default_mtls_endpoint(None) is None + assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DlpServiceClient, "grpc"), + (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), +]) +def test_dlp_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with 
mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dlp.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dlp.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DlpServiceGrpcTransport, "grpc"), + (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DlpServiceRestTransport, "rest"), +]) +def test_dlp_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DlpServiceClient, "grpc"), + (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), +]) +def test_dlp_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dlp.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dlp.googleapis.com' + ) + + +def test_dlp_service_client_get_transport_class(): + transport = DlpServiceClient.get_transport_class() + available_transports = [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceRestTransport, + ] + assert transport in available_transports + + transport = DlpServiceClient.get_transport_class("grpc") + assert transport == transports.DlpServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), +]) +@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) +def test_dlp_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), + 
(DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DlpServiceClient, DlpServiceAsyncClient +]) +@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) +def test_dlp_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), +]) +def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), +]) +def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_dlp_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DlpServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_dlp_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.InspectContentRequest, + dict, +]) +def test_inspect_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectContentResponse( + ) + response = client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +def test_inspect_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + client.inspect_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + +@pytest.mark.asyncio +async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( + )) + response = await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.asyncio +async def test_inspect_content_async_from_dict(): + await test_inspect_content_async(request_type=dict) + + +def test_inspect_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = dlp.InspectContentResponse() + client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_inspect_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) + await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.RedactImageRequest, + dict, +]) +def test_redact_image(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + ) + response = client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +def test_redact_image_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + client.redact_image() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + +@pytest.mark.asyncio +async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + )) + response = await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +@pytest.mark.asyncio +async def test_redact_image_async_from_dict(): + await test_redact_image_async(request_type=dict) + + +def test_redact_image_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = dlp.RedactImageResponse() + client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_redact_image_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) + await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.DeidentifyContentRequest, + dict, +]) +def test_deidentify_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyContentResponse( + ) + response = client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +def test_deidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + client.deidentify_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + +@pytest.mark.asyncio +async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( + )) + response = await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_deidentify_content_async_from_dict(): + await test_deidentify_content_async(request_type=dict) + + +def test_deidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = dlp.DeidentifyContentResponse() + client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_deidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) + await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.ReidentifyContentRequest, + dict, +]) +def test_reidentify_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ReidentifyContentResponse( + ) + response = client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + client.reidentify_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + +@pytest.mark.asyncio +async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( + )) + response = await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_reidentify_content_async_from_dict(): + await test_reidentify_content_async(request_type=dict) + + +def test_reidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = dlp.ReidentifyContentResponse() + client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_reidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) + await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInfoTypesRequest, + dict, +]) +def test_list_info_types(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse( + ) + response = client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +def test_list_info_types_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_info_types),
+            '__call__') as call:
+        client.list_info_types()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListInfoTypesRequest()
+
+@pytest.mark.asyncio
+async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse(
+        ))
+        response = await client.list_info_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListInfoTypesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.ListInfoTypesResponse)
+
+
+@pytest.mark.asyncio
+async def test_list_info_types_async_from_dict():
+    await test_list_info_types_async(request_type=dict)
+
+
+def test_list_info_types_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.ListInfoTypesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ client.list_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_info_types_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_info_types_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateInspectTemplateRequest, + dict, +]) +def test_create_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + client.create_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_inspect_template_async_from_dict(): + await test_create_inspect_template_async(request_type=dict) + + +def test_create_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + + +def test_create_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateInspectTemplateRequest, + dict, +]) +def test_update_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateInspectTemplateRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.InspectTemplate)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+
+
+def test_update_inspect_template_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_inspect_template),
+            '__call__') as call:
+        client.update_inspect_template()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateInspectTemplateRequest()
+
+@pytest.mark.asyncio
+async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_inspect_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+        ))
+        response = await client.update_inspect_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_inspect_template_async_from_dict(): + await test_update_inspect_template_async(request_type=dict) + + +def test_update_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_inspect_template( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_inspect_template( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetInspectTemplateRequest, + dict, +]) +def test_get_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.get_inspect_template),
+            '__call__') as call:
+        client.get_inspect_template()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.GetInspectTemplateRequest()
+
+@pytest.mark.asyncio
+async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_inspect_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+        ))
+        response = await client.get_inspect_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.GetInspectTemplateRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.InspectTemplate)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_get_inspect_template_async_from_dict():
+    await test_get_inspect_template_async(request_type=dict)
+
+
+def test_get_inspect_template_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+ request = dlp.GetInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInspectTemplatesRequest, + dict, +]) +def test_list_inspect_templates(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_inspect_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_inspect_templates),
+            '__call__') as call:
+        client.list_inspect_templates()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListInspectTemplatesRequest()
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_inspect_templates),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_inspect_templates(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListInspectTemplatesRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_from_dict(): + await test_list_inspect_templates_async(request_type=dict) + + +def test_list_inspect_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value = dlp.ListInspectTemplatesResponse() + client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_inspect_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) + await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_inspect_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_inspect_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_inspect_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_inspect_templates_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.ListInspectTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_inspect_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_inspect_templates_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_inspect_templates_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_inspect_templates(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.InspectTemplate) + for i in results) +def test_list_inspect_templates_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_inspect_templates(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_inspect_templates(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.InspectTemplate) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_inspect_templates(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteInspectTemplateRequest, + dict, +]) +def test_delete_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + client.delete_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + +@pytest.mark.asyncio +async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_inspect_template_async_from_dict(): + await test_delete_inspect_template_async(request_type=dict) + + +def test_delete_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = None + client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDeidentifyTemplateRequest, + dict, +]) +def test_create_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + client.create_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_deidentify_template_async_from_dict(): + await test_create_deidentify_template_async(request_type=dict) + + +def test_create_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_deidentify_template( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + + +def test_create_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_deidentify_template( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDeidentifyTemplateRequest, + dict, +]) +def test_update_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + client.update_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_deidentify_template_async_from_dict(): + await test_update_deidentify_template_async(request_type=dict) + + +def test_update_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_deidentify_template( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_deidentify_template( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDeidentifyTemplateRequest, + dict, +]) +def test_get_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + client.get_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_deidentify_template_async_from_dict(): + await test_get_deidentify_template_async(request_type=dict) + + +def test_get_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDeidentifyTemplatesRequest, + dict, +]) +def test_list_deidentify_templates(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_deidentify_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + client.list_deidentify_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_from_dict(): + await test_list_deidentify_templates_async(request_type=dict) + + +def test_list_deidentify_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = dlp.ListDeidentifyTemplatesResponse() + client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) + await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_deidentify_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deidentify_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_deidentify_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_deidentify_templates_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_deidentify_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_deidentify_templates_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_deidentify_templates_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_deidentify_templates(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in results) +def test_list_deidentify_templates_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_deidentify_templates(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_deidentify_templates(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_deidentify_templates(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDeidentifyTemplateRequest, + dict, +]) +def test_delete_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + client.delete_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async_from_dict(): + await test_delete_deidentify_template_async(request_type=dict) + + +def test_delete_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = None + client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateJobTriggerRequest, + dict, +]) +def test_create_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + client.create_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + +@pytest.mark.asyncio +async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + response = await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_create_job_trigger_async_from_dict(): + await test_create_job_trigger_async(request_type=dict) + + +def test_create_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + + +def test_create_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateJobTriggerRequest, + dict, +]) +def test_update_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + client.update_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + +@pytest.mark.asyncio +async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        # FakeUnaryUnaryCall wraps the response so the async client can await it.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            status=dlp.JobTrigger.Status.HEALTHY,
+        ))
+        response = await client.update_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateJobTriggerRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.JobTrigger)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+    assert response.status == dlp.JobTrigger.Status.HEALTHY
+
+
+@pytest.mark.asyncio
+async def test_update_job_trigger_async_from_dict():
+    await test_update_job_trigger_async(request_type=dict)
+
+
+def test_update_job_trigger_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.UpdateJobTriggerRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_job_trigger),
+            '__call__') as call:
+        call.return_value = dlp.JobTrigger()
+        client.update_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectJobTriggerRequest, + dict, +]) +def test_hybrid_inspect_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse( + ) + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, dlp.HybridInspectResponse)
+
+
+def test_hybrid_inspect_job_trigger_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.hybrid_inspect_job_trigger),
+            '__call__') as call:
+        client.hybrid_inspect_job_trigger()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.HybridInspectJobTriggerRequest()
+
+@pytest.mark.asyncio
+async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.hybrid_inspect_job_trigger),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse(
+        ))
+        response = await client.hybrid_inspect_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.HybridInspectJobTriggerRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async_from_dict(): + await test_hybrid_inspect_job_trigger_async(request_type=dict) + + +def test_hybrid_inspect_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_hybrid_inspect_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_hybrid_inspect_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetJobTriggerRequest, + dict, +]) +def test_get_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + client.get_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + +@pytest.mark.asyncio +async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.get_job_trigger),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            status=dlp.JobTrigger.Status.HEALTHY,
+        ))
+        response = await client.get_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.GetJobTriggerRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.JobTrigger)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+    assert response.status == dlp.JobTrigger.Status.HEALTHY
+
+
+@pytest.mark.asyncio
+async def test_get_job_trigger_async_from_dict():
+    await test_get_job_trigger_async(request_type=dict)
+
+
+def test_get_job_trigger_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.GetJobTriggerRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job_trigger),
+            '__call__') as call:
+        call.return_value = dlp.JobTrigger()
+        client.get_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListJobTriggersRequest, + dict, +]) +def test_list_job_triggers(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + ) + response = client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_job_triggers_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_job_triggers),
+            '__call__') as call:
+        client.list_job_triggers()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListJobTriggersRequest()
+
+@pytest.mark.asyncio
+async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_triggers),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_job_triggers(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListJobTriggersRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobTriggersAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_job_triggers_async_from_dict():
+    await test_list_job_triggers_async(request_type=dict)
+
+
+def test_list_job_triggers_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.ListJobTriggersRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = dlp.ListJobTriggersResponse() + client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_job_triggers_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_job_triggers_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.ListJobTriggersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_job_triggers_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_job_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + + +def test_list_job_triggers_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_job_triggers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in results) +def test_list_job_triggers_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_triggers(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_triggers(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_job_triggers(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteJobTriggerRequest, + dict, +]) +def test_delete_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + client.delete_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + +@pytest.mark.asyncio +async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_trigger_async_from_dict(): + await test_delete_job_trigger_async(request_type=dict) + + +def test_delete_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = None + client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ActivateJobTriggerRequest, + dict, +]) +def test_activate_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_activate_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + client.activate_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + +@pytest.mark.asyncio +async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_activate_job_trigger_async_from_dict(): + await test_activate_job_trigger_async(request_type=dict) + + +def test_activate_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_activate_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDlpJobRequest, + dict, +]) +def test_create_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_create_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + client.create_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + +@pytest.mark.asyncio +async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_create_dlp_job_async_from_dict(): + await test_create_dlp_job_async(request_type=dict) + + +def test_create_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_dlp_job( + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) + + +def test_create_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_dlp_job( + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDlpJobsRequest, + dict, +]) +def test_list_dlp_jobs(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_dlp_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + client.list_dlp_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDlpJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_from_dict(): + await test_list_dlp_jobs_async(request_type=dict) + + +def test_list_dlp_jobs_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value = dlp.ListDlpJobsResponse() + client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) + await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_dlp_jobs_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_dlp_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_dlp_jobs_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_dlp_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + + +def test_list_dlp_jobs_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_dlp_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in results) +def test_list_dlp_jobs_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_dlp_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_dlp_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_dlp_jobs(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.GetDlpJobRequest, + dict, +]) +def test_get_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_get_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + client.get_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + +@pytest.mark.asyncio +async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_get_dlp_job_async_from_dict(): + await test_get_dlp_job_async(request_type=dict) + + +def test_get_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDlpJobRequest, + dict, +]) +def test_delete_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + client.delete_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + +@pytest.mark.asyncio +async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dlp_job_async_from_dict(): + await test_delete_dlp_job_async(request_type=dict) + + +def test_delete_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value = None + client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CancelDlpJobRequest, + dict, +]) +def test_cancel_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + client.cancel_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async_from_dict(): + await test_cancel_dlp_job_async(request_type=dict) + + +def test_cancel_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = None + client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateStoredInfoTypeRequest, + dict, +]) +def test_create_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_create_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + client.create_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_stored_info_type_async_from_dict(): + await test_create_stored_info_type_async(request_type=dict) + + +def test_create_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_stored_info_type( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + + +def test_create_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_stored_info_type( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateStoredInfoTypeRequest, + dict, +]) +def test_update_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_update_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + client.update_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_update_stored_info_type_async_from_dict(): + await test_update_stored_info_type_async(request_type=dict) + + +def test_update_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_stored_info_type( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_stored_info_type( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetStoredInfoTypeRequest, + dict, +]) +def test_get_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_get_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + client.get_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_stored_info_type_async_from_dict(): + await test_get_stored_info_type_async(request_type=dict) + + +def test_get_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListStoredInfoTypesRequest, + dict, +]) +def test_list_stored_info_types(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_stored_info_types_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + client.list_stored_info_types() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + +@pytest.mark.asyncio +async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_from_dict(): + await test_list_stored_info_types_async(request_type=dict) + + +def test_list_stored_info_types_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value = dlp.ListStoredInfoTypesResponse() + client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_stored_info_types_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) + await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_stored_info_types_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_stored_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_stored_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_stored_info_types(
+            dlp.ListStoredInfoTypesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.ListStoredInfoTypesResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_stored_info_types(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_stored_info_types(
+            dlp.ListStoredInfoTypesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_stored_info_types_pager(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),  # instance, not the class
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[],
+                next_page_token='def',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_stored_info_types(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, dlp.StoredInfoType)
+                   for i in results)
+def test_list_stored_info_types_pages(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),  # instance, not the class
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[],
+                next_page_token='def',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_stored_info_types(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_async_pager():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),  # instance, not the class
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[],
+                next_page_token='def',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_stored_info_types(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dlp.StoredInfoType)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_async_pages():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),  # instance, not the class
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_stored_info_types(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteStoredInfoTypeRequest, + dict, +]) +def test_delete_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + client.delete_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async_from_dict(): + await test_delete_stored_info_type_async(request_type=dict) + + +def test_delete_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = None + client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectDlpJobRequest, + dict, +]) +def test_hybrid_inspect_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse( + ) + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + client.hybrid_inspect_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( + )) + response = await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async_from_dict(): + await test_hybrid_inspect_dlp_job_async(request_type=dict) + + +def test_hybrid_inspect_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.HybridInspectDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_hybrid_inspect_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_hybrid_inspect_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.FinishDlpJobRequest, + dict, +]) +def test_finish_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_finish_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + client.finish_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + +@pytest.mark.asyncio +async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_finish_dlp_job_async_from_dict(): + await test_finish_dlp_job_async(request_type=dict) + + +def test_finish_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = None + client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_finish_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.InspectContentRequest, + dict, +]) +def test_inspect_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.inspect_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_inspect_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_inspect_content") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectContentResponse.to_json(dlp.InspectContentResponse()) + + request = dlp.InspectContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectContentResponse() + + client.inspect_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + 
pre.assert_called_once() + post.assert_called_once() + + +def test_inspect_content_rest_bad_request(transport: str = 'rest', request_type=dlp.InspectContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.inspect_content(request) + + +def test_inspect_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.RedactImageRequest, + dict, +]) +def test_redact_image_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.RedactImageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.redact_image(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_redact_image_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_redact_image") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.RedactImageResponse.to_json(dlp.RedactImageResponse()) + + request = dlp.RedactImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value 
= request, metadata + post.return_value = dlp.RedactImageResponse() + + client.redact_image(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_redact_image_rest_bad_request(transport: str = 'rest', request_type=dlp.RedactImageRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.redact_image(request) + + +def test_redact_image_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeidentifyContentRequest, + dict, +]) +def test_deidentify_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.deidentify_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_deidentify_content") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyContentResponse.to_json(dlp.DeidentifyContentResponse()) + + request = dlp.DeidentifyContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyContentResponse() + + client.deidentify_content(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_deidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.DeidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deidentify_content(request) + + +def test_deidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ReidentifyContentRequest, + dict, +]) +def test_reidentify_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ReidentifyContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.reidentify_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_rest_required_fields(request_type=dlp.ReidentifyContentRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ReidentifyContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.reidentify_content(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_reidentify_content_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.reidentify_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_reidentify_content") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ReidentifyContentResponse.to_json(dlp.ReidentifyContentResponse()) + + request = dlp.ReidentifyContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ReidentifyContentResponse() + + client.reidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.ReidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reidentify_content(request) + + +def test_reidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInfoTypesRequest, + dict, +]) +def test_list_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_info_types(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_info_types") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListInfoTypesResponse.to_json(dlp.ListInfoTypesResponse()) + + request = dlp.ListInfoTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInfoTypesResponse() + + client.list_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInfoTypesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_info_types(request) + + +def test_list_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/infoTypes" % client.transport._host, args[1]) + + +def test_list_info_types_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +def test_list_info_types_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateInspectTemplateRequest, + dict, +]) +def test_create_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_inspect_template_rest_required_fields(request_type=dlp.CreateInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "inspectTemplate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_inspect_template") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = dlp.CreateInspectTemplateRequest.pb(dlp.CreateInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.CreateInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.create_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_inspect_template(request) + + +def test_create_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) + + +def test_create_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +def test_create_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateInspectTemplateRequest, + dict, +]) +def test_update_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_inspect_template_rest_required_fields(request_type=dlp.UpdateInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_inspect_template") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = dlp.UpdateInspectTemplateRequest.pb(dlp.UpdateInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.UpdateInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.update_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_inspect_template(request) + + +def test_update_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_update_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetInspectTemplateRequest, + dict, +]) +def test_get_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_inspect_template_rest_required_fields(request_type=dlp.GetInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_inspect_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + + request = dlp.GetInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + + client.get_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_inspect_template(request) + + +def test_get_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_get_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + + +def test_get_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInspectTemplatesRequest, + dict, +]) +def test_list_inspect_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_inspect_templates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_inspect_templates_rest_required_fields(request_type=dlp.ListInspectTemplatesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_inspect_templates(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_inspect_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_inspect_templates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_inspect_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_inspect_templates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListInspectTemplatesRequest.pb(dlp.ListInspectTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListInspectTemplatesResponse.to_json(dlp.ListInspectTemplatesResponse()) + + request = dlp.ListInspectTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInspectTemplatesResponse() + + client.list_inspect_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_inspect_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInspectTemplatesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_inspect_templates(request) + + +def test_list_inspect_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_inspect_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) + + +def test_list_inspect_templates_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_inspect_templates_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_inspect_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.InspectTemplate) + for i in results) + + pages = list(client.list_inspect_templates(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + 

@pytest.mark.parametrize("request_type", [
    dlp.DeleteInspectTemplateRequest,
    dict,
])
def test_delete_inspect_template_rest(request_type):
    """DeleteInspectTemplate over REST returns None on success."""
    client = DlpServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = None

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = ''

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete_inspect_template(request)

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_inspect_template_rest_required_fields(request_type=dlp.DeleteInspectTemplateRequest):
    """'name' is required and must survive the query-param bookkeeping."""
    transport_class = transports.DlpServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = DlpServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = None
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = ''

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete_inspect_template(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_delete_inspect_template_rest_unset_required_fields():
    """Only 'name' is required; there are no optional query params."""
    # Fix: instantiate AnonymousCredentials(); the original passed the class
    # object itself as `credentials` instead of a credentials instance.
    transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.delete_inspect_template._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_inspect_template_rest_interceptors(null_interceptor):
    """The pre interceptor fires exactly once (no post: the RPC returns None)."""
    transport = transports.DlpServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(),
    )
    client = DlpServiceClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_inspect_template") as pre:
        pre.assert_not_called()
        pb_message = dlp.DeleteInspectTemplateRequest.pb(dlp.DeleteInspectTemplateRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()

        request = dlp.DeleteInspectTemplateRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata

        client.delete_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()


def test_delete_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteInspectTemplateRequest):
    """An HTTP 400 surfaces as core_exceptions.BadRequest."""
    client = DlpServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete_inspect_template(request)


def test_delete_inspect_template_rest_flattened():
    """Flattened kwargs are mapped onto the expected HTTP path."""
    client = DlpServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = None

        # get arguments that satisfy an http rule for this method
        sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            name='name_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = ''
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.delete_inspect_template(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1])


def test_delete_inspect_template_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = DlpServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_inspect_template(
            dlp.DeleteInspectTemplateRequest(),
            name='name_value',
        )


def test_delete_inspect_template_rest_error():
    """Smoke test: a client with the REST transport can be constructed."""
    client = DlpServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    dlp.CreateDeidentifyTemplateRequest,
    dict,
])
def test_create_deidentify_template_rest(request_type):
    """CreateDeidentifyTemplate over REST returns the created template."""
    client = DlpServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'organizations/sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_deidentify_template_rest_required_fields(request_type=dlp.CreateDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default 
values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "deidentifyTemplate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_create_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateDeidentifyTemplateRequest.pb(dlp.CreateDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + + request = dlp.CreateDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.create_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deidentify_template(request) + + +def test_create_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) + + +def test_create_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + +def test_create_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDeidentifyTemplateRequest, + dict, +]) +def test_update_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_deidentify_template_rest_required_fields(request_type=dlp.UpdateDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), 
+ interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.UpdateDeidentifyTemplateRequest.pb(dlp.UpdateDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + + request = dlp.UpdateDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.update_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deidentify_template(request) + + +def test_update_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_update_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDeidentifyTemplateRequest, + dict, +]) +def test_get_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_deidentify_template_rest_required_fields(request_type=dlp.GetDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetDeidentifyTemplateRequest.pb(dlp.GetDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + + request = dlp.GetDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + + client.get_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deidentify_template(request) + + +def test_get_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_get_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + + +def test_get_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDeidentifyTemplatesRequest, + dict, +]) +def test_list_deidentify_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_deidentify_templates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_deidentify_templates_rest_required_fields(request_type=dlp.ListDeidentifyTemplatesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_deidentify_templates(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_deidentify_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_deidentify_templates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deidentify_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListDeidentifyTemplatesRequest.pb(dlp.ListDeidentifyTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListDeidentifyTemplatesResponse.to_json(dlp.ListDeidentifyTemplatesResponse()) + + request = dlp.ListDeidentifyTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDeidentifyTemplatesResponse() + + client.list_deidentify_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_deidentify_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDeidentifyTemplatesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deidentify_templates(request) + + +def test_list_deidentify_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_deidentify_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) + + +def test_list_deidentify_templates_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_deidentify_templates_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_deidentify_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in results) + + pages = list(client.list_deidentify_templates(request=sample_request).pages) + for page_, token in zip(pages, 
['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDeidentifyTemplateRequest, + dict, +]) +def test_delete_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_deidentify_template(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_deidentify_template_rest_required_fields(request_type=dlp.DeleteDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDeidentifyTemplateRequest.pb(dlp.DeleteDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + + client.delete_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deidentify_template(request) + + +def test_delete_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_delete_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +def test_delete_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateJobTriggerRequest, + dict, +]) +def test_create_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_rest_required_fields(request_type=dlp.CreateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "jobTrigger", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.CreateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.create_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job_trigger(request) + + +def test_create_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) + + +def test_create_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +def test_create_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateJobTriggerRequest, + dict, +]) +def test_update_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_rest_required_fields(request_type=dlp.UpdateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.UpdateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.update_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job_trigger(request) + + +def test_update_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_update_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectJobTriggerRequest, + dict, +]) +def test_hybrid_inspect_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_rest_required_fields(request_type=dlp.HybridInspectJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.hybrid_inspect_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectJobTriggerRequest.pb(dlp.HybridInspectJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) + + request = dlp.HybridInspectJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_job_trigger(request) + + +def test_hybrid_inspect_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.hybrid_inspect_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" % client.transport._host, args[1]) + + +def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +def test_hybrid_inspect_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetJobTriggerRequest, + dict, +]) +def test_get_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.GetJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.get_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.GetJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_trigger(request) + + +def test_get_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_get_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + + +def test_get_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListJobTriggersRequest, + dict, +]) +def test_list_job_triggers_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_job_triggers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_job_triggers_rest_required_fields(request_type=dlp.ListJobTriggersRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_job_triggers(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_job_triggers_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_job_triggers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_job_triggers_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_job_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListJobTriggersResponse.to_json(dlp.ListJobTriggersResponse()) + + request = dlp.ListJobTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListJobTriggersResponse() + + client.list_job_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_job_triggers_rest_bad_request(transport: str = 'rest', request_type=dlp.ListJobTriggersRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_job_triggers(request) + + +def test_list_job_triggers_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListJobTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_job_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) + + +def test_list_job_triggers_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + + +def test_list_job_triggers_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_job_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in results) + + pages = list(client.list_job_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteJobTriggerRequest, + dict, +]) +def test_delete_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_job_trigger(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_trigger_rest_required_fields(request_type=dlp.DeleteJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_job_trigger") as pre: + pre.assert_not_called() + pb_message = 
dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_job_trigger(request) + + +def test_delete_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_delete_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + + +def test_delete_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ActivateJobTriggerRequest, + dict, +]) +def test_activate_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.activate_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_activate_job_trigger_rest_required_fields(request_type=dlp.ActivateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.activate_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_activate_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_activate_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_activate_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.ActivateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.activate_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_activate_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.ActivateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.activate_job_trigger(request) + + +def test_activate_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDlpJobRequest, + dict, +]) +def test_create_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.CreateDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.create_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dlp_job(request) + + +def test_create_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) + + +def test_create_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + +def test_create_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDlpJobsRequest, + dict, +]) +def test_list_dlp_jobs_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_dlp_jobs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.ListDlpJobsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list_dlp_jobs(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_dlp_jobs_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_dlp_jobs._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_dlp_jobs_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(),
+        )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs") as post, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = 
dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListDlpJobsResponse.to_json(dlp.ListDlpJobsResponse()) + + request = dlp.ListDlpJobsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDlpJobsResponse() + + client.list_dlp_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_dlp_jobs_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDlpJobsRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dlp_jobs(request) + + +def test_list_dlp_jobs_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListDlpJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_dlp_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) + + +def test_list_dlp_jobs_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + + +def test_list_dlp_jobs_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_dlp_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in results) + + pages = list(client.list_dlp_jobs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDlpJobRequest, + dict, +]) +def test_get_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = dlp.DlpJob()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.DlpJob.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_dlp_job(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_dlp_job_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_dlp_job._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_dlp_job_rest_interceptors(null_interceptor):
+    
transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.GetDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.get_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dlp_job(request) + + +def test_get_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) + + +def test_get_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + + +def test_get_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDlpJobRequest, + dict, +]) +def test_delete_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ''
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_dlp_job(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_dlp_job_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete_dlp_job._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_dlp_job_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(),
+        )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_dlp_job") as pre:
+        pre.assert_not_called()
+        pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+
+        request = dlp.DeleteDlpJobRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_dlp_job(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dlp_job(request) + + +def test_delete_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) + + +def test_delete_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + + +def test_delete_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CancelDlpJobRequest, + dict, +]) +def test_cancel_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.cancel_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ''
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.cancel_dlp_job(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_cancel_dlp_job_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.cancel_dlp_job._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_cancel_dlp_job_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(),
+        )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job") as pre:
+        pre.assert_not_called()
+        pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+
+        request = dlp.CancelDlpJobRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        
client.cancel_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_cancel_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CancelDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_dlp_job(request) + + +def test_cancel_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateStoredInfoTypeRequest, + dict, +]) +def test_create_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_create_stored_info_type_rest_required_fields(request_type=dlp.CreateStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "config", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_stored_info_type") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateStoredInfoTypeRequest.pb(dlp.CreateStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.CreateStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.create_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_stored_info_type(request) + + +def test_create_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) + + +def test_create_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + +def test_create_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateStoredInfoTypeRequest, + dict, +]) +def test_update_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_update_stored_info_type_rest_required_fields(request_type=dlp.UpdateStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_stored_info_type") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.UpdateStoredInfoTypeRequest.pb(dlp.UpdateStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.UpdateStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.update_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_stored_info_type(request) + + +def test_update_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_update_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetStoredInfoTypeRequest, + dict, +]) +def test_get_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_get_stored_info_type_rest_required_fields(request_type=dlp.GetStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_stored_info_type") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.GetStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.get_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.GetStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_stored_info_type(request) + + +def test_get_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_get_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + + +def test_get_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListStoredInfoTypesRequest, + dict, +]) +def test_list_stored_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_stored_info_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_stored_info_types_rest_required_fields(request_type=dlp.ListStoredInfoTypesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_stored_info_types(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_stored_info_types_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_stored_info_types._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_stored_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_stored_info_types") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListStoredInfoTypesResponse.to_json(dlp.ListStoredInfoTypesResponse()) + + request = dlp.ListStoredInfoTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListStoredInfoTypesResponse() + + client.list_stored_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_stored_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListStoredInfoTypesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_stored_info_types(request) + + +def test_list_stored_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListStoredInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_stored_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) + + +def test_list_stored_info_types_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent='parent_value', + ) + + +def test_list_stored_info_types_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_stored_info_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in results) + + pages = list(client.list_stored_info_types(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteStoredInfoTypeRequest, + dict, +]) +def test_delete_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_stored_info_type_rest_required_fields(request_type=dlp.DeleteStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type") as pre: + pre.assert_not_called() + pb_message = 
dlp.DeleteStoredInfoTypeRequest.pb(dlp.DeleteStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_stored_info_type(request) + + +def test_delete_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_delete_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + + +def test_delete_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectDlpJobRequest, + dict, +]) +def test_hybrid_inspect_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_rest_required_fields(request_type=dlp.HybridInspectDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + 
+ # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.hybrid_inspect_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = 
DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) + + request = dlp.HybridInspectDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_dlp_job(request) + + +def test_hybrid_inspect_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.hybrid_inspect_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" % client.transport._host, args[1]) + + +def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + + +def test_hybrid_inspect_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.FinishDlpJobRequest, + dict, +]) +def test_finish_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.finish_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.finish_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_finish_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_finish_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_finish_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.FinishDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.finish_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_finish_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.FinishDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.finish_dlp_job(request) + + +def test_finish_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DlpServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DlpServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = DlpServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DlpServiceGrpcTransport, + ) + +def test_dlp_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_dlp_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'inspect_content', + 'redact_image', + 'deidentify_content', + 'reidentify_content', + 'list_info_types', + 'create_inspect_template', + 'update_inspect_template', + 'get_inspect_template', + 'list_inspect_templates', + 'delete_inspect_template', + 'create_deidentify_template', + 'update_deidentify_template', + 'get_deidentify_template', + 'list_deidentify_templates', + 'delete_deidentify_template', + 'create_job_trigger', + 'update_job_trigger', + 'hybrid_inspect_job_trigger', + 'get_job_trigger', + 'list_job_triggers', + 'delete_job_trigger', + 'activate_job_trigger', + 'create_dlp_job', + 'list_dlp_jobs', + 'get_dlp_job', + 'delete_dlp_job', + 'cancel_dlp_job', + 'create_stored_info_type', + 'update_stored_info_type', + 'get_stored_info_type', + 'list_stored_info_types', + 'delete_stored_info_type', + 'hybrid_inspect_dlp_job', + 'finish_dlp_job', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_dlp_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + 
+def test_dlp_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport() + adc.assert_called_once() + + +def test_dlp_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DlpServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + ], +) +def test_dlp_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
    # (Tail of the ADC-scopes test whose `def` is above this view.)
    # ADC must be queried with the caller's scopes, the service's default
    # scope, and the quota project forwarded verbatim.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )


@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DlpServiceGrpcTransport,
        transports.DlpServiceGrpcAsyncIOTransport,
        transports.DlpServiceRestTransport,
    ],
)
def test_dlp_service_transport_auth_gdch_credentials(transport_class):
    """GDC-H credentials must be re-scoped to the api_audience (host when unset)."""
    host = 'https://language.com'
    # When api_audience is None the transport should fall back to `host`.
    api_audience_tests = [None, 'https://language2.com']
    api_audience_expect = [host, 'https://language2.com']
    for t, e in zip(api_audience_tests, api_audience_expect):
        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
            gdch_mock = mock.MagicMock()
            # with_gdch_audience returns the credential itself so the
            # transport can keep using the mock after re-scoping.
            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
            adc.return_value = (gdch_mock, None)
            transport_class(host=host, api_audience=t)
            gdch_mock.with_gdch_audience.assert_called_once_with(
                e
            )


@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.DlpServiceGrpcTransport, grpc_helpers),
        (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_dlp_service_transport_create_channel(transport_class, grpc_helpers):
    """The transport must build its channel through grpc_helpers.create_channel."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )

        # The default endpoint, ADC credentials, scopes and message-size
        # options must all be forwarded to the channel factory.
        create_channel.assert_called_with(
            "dlp.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            scopes=["1", "2"],
            default_host="dlp.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )


@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport])
def test_dlp_service_grpc_transport_client_cert_source_for_mtls(
    transport_class
):
    """mTLS: explicit ssl_channel_credentials wins; otherwise the cert callback is used."""
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback
            )
            # The cert/key pair emitted by the callback must be what the SSL
            # channel credentials are built from.
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert,
                private_key=expected_key
            )

def test_dlp_service_http_transport_client_cert_source_for_mtls():
    """REST transport must configure mTLS on its AuthorizedSession via the callback."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.DlpServiceRestTransport (
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_dlp_service_host_no_port(transport_name):
    """Default endpoint: gRPC appends :443, REST uses an https:// URL."""
    client = DlpServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'dlp.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://dlp.googleapis.com'
    )

@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_dlp_service_host_with_port(transport_name):
    """An explicit port in api_endpoint must be preserved by every transport."""
    client = DlpServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'dlp.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://dlp.googleapis.com:8000'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def 
test_dlp_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DlpServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DlpServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.inspect_content._session + session2 = client2.transport.inspect_content._session + assert session1 != session2 + session1 = client1.transport.redact_image._session + session2 = client2.transport.redact_image._session + assert session1 != session2 + session1 = client1.transport.deidentify_content._session + session2 = client2.transport.deidentify_content._session + assert session1 != session2 + session1 = client1.transport.reidentify_content._session + session2 = client2.transport.reidentify_content._session + assert session1 != session2 + session1 = client1.transport.list_info_types._session + session2 = client2.transport.list_info_types._session + assert session1 != session2 + session1 = client1.transport.create_inspect_template._session + session2 = client2.transport.create_inspect_template._session + assert session1 != session2 + session1 = client1.transport.update_inspect_template._session + session2 = client2.transport.update_inspect_template._session + assert session1 != session2 + session1 = client1.transport.get_inspect_template._session + session2 = client2.transport.get_inspect_template._session + assert session1 != session2 + session1 = client1.transport.list_inspect_templates._session + session2 = client2.transport.list_inspect_templates._session + assert session1 != session2 + session1 = client1.transport.delete_inspect_template._session + session2 = client2.transport.delete_inspect_template._session + assert session1 != session2 + session1 = client1.transport.create_deidentify_template._session + session2 = client2.transport.create_deidentify_template._session + assert session1 != session2 + 
session1 = client1.transport.update_deidentify_template._session + session2 = client2.transport.update_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.get_deidentify_template._session + session2 = client2.transport.get_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.list_deidentify_templates._session + session2 = client2.transport.list_deidentify_templates._session + assert session1 != session2 + session1 = client1.transport.delete_deidentify_template._session + session2 = client2.transport.delete_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.create_job_trigger._session + session2 = client2.transport.create_job_trigger._session + assert session1 != session2 + session1 = client1.transport.update_job_trigger._session + session2 = client2.transport.update_job_trigger._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_job_trigger._session + session2 = client2.transport.hybrid_inspect_job_trigger._session + assert session1 != session2 + session1 = client1.transport.get_job_trigger._session + session2 = client2.transport.get_job_trigger._session + assert session1 != session2 + session1 = client1.transport.list_job_triggers._session + session2 = client2.transport.list_job_triggers._session + assert session1 != session2 + session1 = client1.transport.delete_job_trigger._session + session2 = client2.transport.delete_job_trigger._session + assert session1 != session2 + session1 = client1.transport.activate_job_trigger._session + session2 = client2.transport.activate_job_trigger._session + assert session1 != session2 + session1 = client1.transport.create_dlp_job._session + session2 = client2.transport.create_dlp_job._session + assert session1 != session2 + session1 = client1.transport.list_dlp_jobs._session + session2 = client2.transport.list_dlp_jobs._session + assert session1 != session2 + session1 = 
client1.transport.get_dlp_job._session + session2 = client2.transport.get_dlp_job._session + assert session1 != session2 + session1 = client1.transport.delete_dlp_job._session + session2 = client2.transport.delete_dlp_job._session + assert session1 != session2 + session1 = client1.transport.cancel_dlp_job._session + session2 = client2.transport.cancel_dlp_job._session + assert session1 != session2 + session1 = client1.transport.create_stored_info_type._session + session2 = client2.transport.create_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.update_stored_info_type._session + session2 = client2.transport.update_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.get_stored_info_type._session + session2 = client2.transport.get_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.list_stored_info_types._session + session2 = client2.transport.list_stored_info_types._session + assert session1 != session2 + session1 = client1.transport.delete_stored_info_type._session + session2 = client2.transport.delete_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_dlp_job._session + session2 = client2.transport.hybrid_inspect_dlp_job._session + assert session1 != session2 + session1 = client1.transport.finish_dlp_job._session + session2 = client2.transport.finish_dlp_job._session + assert session1 != session2 +def test_dlp_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_dlp_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DlpServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + 
credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_deidentify_template_path(): + organization = "squid" + deidentify_template = "clam" + expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) + actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) + 
assert expected == actual + + +def test_parse_deidentify_template_path(): + expected = { + "organization": "whelk", + "deidentify_template": "octopus", + } + path = DlpServiceClient.deidentify_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_deidentify_template_path(path) + assert expected == actual + +def test_dlp_content_path(): + project = "oyster" + expected = "projects/{project}/dlpContent".format(project=project, ) + actual = DlpServiceClient.dlp_content_path(project) + assert expected == actual + + +def test_parse_dlp_content_path(): + expected = { + "project": "nudibranch", + } + path = DlpServiceClient.dlp_content_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_content_path(path) + assert expected == actual + +def test_dlp_job_path(): + project = "cuttlefish" + dlp_job = "mussel" + expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) + actual = DlpServiceClient.dlp_job_path(project, dlp_job) + assert expected == actual + + +def test_parse_dlp_job_path(): + expected = { + "project": "winkle", + "dlp_job": "nautilus", + } + path = DlpServiceClient.dlp_job_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_job_path(path) + assert expected == actual + +def test_finding_path(): + project = "scallop" + location = "abalone" + finding = "squid" + expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) + actual = DlpServiceClient.finding_path(project, location, finding) + assert expected == actual + + +def test_parse_finding_path(): + expected = { + "project": "clam", + "location": "whelk", + "finding": "octopus", + } + path = DlpServiceClient.finding_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_finding_path(path) + assert expected == actual + +def test_inspect_template_path(): + organization = "oyster" + inspect_template = "nudibranch" + expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) + actual = DlpServiceClient.inspect_template_path(organization, inspect_template) + assert expected == actual + + +def test_parse_inspect_template_path(): + expected = { + "organization": "cuttlefish", + "inspect_template": "mussel", + } + path = DlpServiceClient.inspect_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_inspect_template_path(path) + assert expected == actual + +def test_job_trigger_path(): + project = "winkle" + job_trigger = "nautilus" + expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) + actual = DlpServiceClient.job_trigger_path(project, job_trigger) + assert expected == actual + + +def test_parse_job_trigger_path(): + expected = { + "project": "scallop", + "job_trigger": "abalone", + } + path = DlpServiceClient.job_trigger_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_job_trigger_path(path) + assert expected == actual + +def test_stored_info_type_path(): + organization = "squid" + stored_info_type = "clam" + expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) + actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) + assert expected == actual + + +def test_parse_stored_info_type_path(): + expected = { + "organization": "whelk", + "stored_info_type": "octopus", + } + path = DlpServiceClient.stored_info_type_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_stored_info_type_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DlpServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DlpServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = DlpServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DlpServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DlpServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DlpServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = DlpServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DlpServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DlpServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DlpServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DlpServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From c418fc87fc4370d8cda0da0d3b358e610e34a95c Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 24 Feb 2023 01:59:58 +0000 Subject: [PATCH 6/7] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../services/dlp_service/transports/rest.py | 62 +- google/cloud/dlp_v2/types/dlp.py | 2 + google/cloud/dlp_v2/types/storage.py | 2 + owl-bot-staging/v2/.coveragerc | 13 - owl-bot-staging/v2/.flake8 | 33 - owl-bot-staging/v2/MANIFEST.in | 2 - owl-bot-staging/v2/README.rst | 49 - owl-bot-staging/v2/docs/conf.py | 376 - .../v2/docs/dlp_v2/dlp_service.rst | 10 - owl-bot-staging/v2/docs/dlp_v2/services.rst | 6 - owl-bot-staging/v2/docs/dlp_v2/types.rst | 6 - 
owl-bot-staging/v2/docs/index.rst | 7 - .../v2/google/cloud/dlp/__init__.py | 395 - .../v2/google/cloud/dlp/gapic_version.py | 16 - owl-bot-staging/v2/google/cloud/dlp/py.typed | 2 - .../v2/google/cloud/dlp_v2/__init__.py | 396 - .../google/cloud/dlp_v2/gapic_metadata.json | 538 - .../v2/google/cloud/dlp_v2/gapic_version.py | 16 - .../v2/google/cloud/dlp_v2/py.typed | 2 - .../google/cloud/dlp_v2/services/__init__.py | 15 - .../dlp_v2/services/dlp_service/__init__.py | 22 - .../services/dlp_service/async_client.py | 4143 ---- .../dlp_v2/services/dlp_service/client.py | 4269 ---- .../dlp_v2/services/dlp_service/pagers.py | 623 - .../dlp_service/transports/__init__.py | 38 - .../services/dlp_service/transports/base.py | 752 - .../services/dlp_service/transports/grpc.py | 1262 -- .../dlp_service/transports/grpc_asyncio.py | 1261 -- .../services/dlp_service/transports/rest.py | 4325 ---- .../v2/google/cloud/dlp_v2/types/__init__.py | 390 - .../v2/google/cloud/dlp_v2/types/dlp.py | 8848 -------- .../v2/google/cloud/dlp_v2/types/storage.py | 1476 -- owl-bot-staging/v2/mypy.ini | 3 - owl-bot-staging/v2/noxfile.py | 184 - ..._dlp_service_activate_job_trigger_async.py | 52 - ...d_dlp_service_activate_job_trigger_sync.py | 52 - ...erated_dlp_service_cancel_dlp_job_async.py | 50 - ...nerated_dlp_service_cancel_dlp_job_sync.py | 50 - ...ervice_create_deidentify_template_async.py | 52 - ...service_create_deidentify_template_sync.py | 52 - ...erated_dlp_service_create_dlp_job_async.py | 52 - ...nerated_dlp_service_create_dlp_job_sync.py | 52 - ...p_service_create_inspect_template_async.py | 52 - ...lp_service_create_inspect_template_sync.py | 52 - ...ed_dlp_service_create_job_trigger_async.py | 56 - ...ted_dlp_service_create_job_trigger_sync.py | 56 - ...p_service_create_stored_info_type_async.py | 52 - ...lp_service_create_stored_info_type_sync.py | 52 - ...ed_dlp_service_deidentify_content_async.py | 51 - ...ted_dlp_service_deidentify_content_sync.py | 51 - 
...ervice_delete_deidentify_template_async.py | 50 - ...service_delete_deidentify_template_sync.py | 50 - ...erated_dlp_service_delete_dlp_job_async.py | 50 - ...nerated_dlp_service_delete_dlp_job_sync.py | 50 - ...p_service_delete_inspect_template_async.py | 50 - ...lp_service_delete_inspect_template_sync.py | 50 - ...ed_dlp_service_delete_job_trigger_async.py | 50 - ...ted_dlp_service_delete_job_trigger_sync.py | 50 - ...p_service_delete_stored_info_type_async.py | 50 - ...lp_service_delete_stored_info_type_sync.py | 50 - ...erated_dlp_service_finish_dlp_job_async.py | 50 - ...nerated_dlp_service_finish_dlp_job_sync.py | 50 - ...p_service_get_deidentify_template_async.py | 52 - ...lp_service_get_deidentify_template_sync.py | 52 - ...generated_dlp_service_get_dlp_job_async.py | 52 - ..._generated_dlp_service_get_dlp_job_sync.py | 52 - ..._dlp_service_get_inspect_template_async.py | 52 - ...d_dlp_service_get_inspect_template_sync.py | 52 - ...rated_dlp_service_get_job_trigger_async.py | 52 - ...erated_dlp_service_get_job_trigger_sync.py | 52 - ..._dlp_service_get_stored_info_type_async.py | 52 - ...d_dlp_service_get_stored_info_type_sync.py | 52 - ...lp_service_hybrid_inspect_dlp_job_async.py | 52 - ...dlp_service_hybrid_inspect_dlp_job_sync.py | 52 - ...ervice_hybrid_inspect_job_trigger_async.py | 52 - ...service_hybrid_inspect_job_trigger_sync.py | 52 - ...rated_dlp_service_inspect_content_async.py | 51 - ...erated_dlp_service_inspect_content_sync.py | 51 - ...service_list_deidentify_templates_async.py | 53 - ..._service_list_deidentify_templates_sync.py | 53 - ...nerated_dlp_service_list_dlp_jobs_async.py | 53 - ...enerated_dlp_service_list_dlp_jobs_sync.py | 53 - ...rated_dlp_service_list_info_types_async.py | 51 - ...erated_dlp_service_list_info_types_sync.py | 51 - ...lp_service_list_inspect_templates_async.py | 53 - ...dlp_service_list_inspect_templates_sync.py | 53 - ...ted_dlp_service_list_job_triggers_async.py | 53 - 
...ated_dlp_service_list_job_triggers_sync.py | 53 - ...lp_service_list_stored_info_types_async.py | 53 - ...dlp_service_list_stored_info_types_sync.py | 53 - ...enerated_dlp_service_redact_image_async.py | 51 - ...generated_dlp_service_redact_image_sync.py | 51 - ...ed_dlp_service_reidentify_content_async.py | 52 - ...ted_dlp_service_reidentify_content_sync.py | 52 - ...ervice_update_deidentify_template_async.py | 52 - ...service_update_deidentify_template_sync.py | 52 - ...p_service_update_inspect_template_async.py | 52 - ...lp_service_update_inspect_template_sync.py | 52 - ...ed_dlp_service_update_job_trigger_async.py | 52 - ...ted_dlp_service_update_job_trigger_sync.py | 52 - ...p_service_update_stored_info_type_async.py | 52 - ...lp_service_update_stored_info_type_sync.py | 52 - ...nippet_metadata_google.privacy.dlp.v2.json | 5503 ----- .../v2/scripts/fixup_dlp_v2_keywords.py | 209 - owl-bot-staging/v2/setup.py | 90 - .../v2/testing/constraints-3.10.txt | 6 - .../v2/testing/constraints-3.11.txt | 6 - .../v2/testing/constraints-3.12.txt | 6 - .../v2/testing/constraints-3.7.txt | 9 - .../v2/testing/constraints-3.8.txt | 6 - .../v2/testing/constraints-3.9.txt | 6 - owl-bot-staging/v2/tests/__init__.py | 16 - owl-bot-staging/v2/tests/unit/__init__.py | 16 - .../v2/tests/unit/gapic/__init__.py | 16 - .../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 - .../unit/gapic/dlp_v2/test_dlp_service.py | 17404 ---------------- 116 files changed, 35 insertions(+), 56336 deletions(-) delete mode 100644 owl-bot-staging/v2/.coveragerc delete mode 100644 owl-bot-staging/v2/.flake8 delete mode 100644 owl-bot-staging/v2/MANIFEST.in delete mode 100644 owl-bot-staging/v2/README.rst delete mode 100644 owl-bot-staging/v2/docs/conf.py delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/services.rst delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/types.rst delete mode 100644 owl-bot-staging/v2/docs/index.rst delete mode 
100644 owl-bot-staging/v2/google/cloud/dlp/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py delete mode 100644 owl-bot-staging/v2/mypy.ini delete mode 100644 owl-bot-staging/v2/noxfile.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py delete 
mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json delete mode 100644 owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py delete mode 100644 owl-bot-staging/v2/setup.py delete mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v2/tests/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/google/cloud/dlp_v2/services/dlp_service/transports/rest.py index ad8e9da0..99ca1aae 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -17,7 +17,7 @@ import dataclasses import json # type: ignore import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming @@ -1085,7 +1085,7 @@ class _ActivateJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("ActivateJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = 
{} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1191,7 +1191,7 @@ class _CancelDlpJob(DlpServiceRestStub): def __hash__(self): return hash("CancelDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1281,7 +1281,7 @@ class _CreateDeidentifyTemplate(DlpServiceRestStub): def __hash__(self): return hash("CreateDeidentifyTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1401,7 +1401,7 @@ class _CreateDlpJob(DlpServiceRestStub): def __hash__(self): return hash("CreateDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1508,7 +1508,7 @@ class _CreateInspectTemplate(DlpServiceRestStub): def __hash__(self): return hash("CreateInspectTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1629,7 +1629,7 @@ class _CreateJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("CreateJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1740,7 +1740,7 @@ class _CreateStoredInfoType(DlpServiceRestStub): def __hash__(self): return hash("CreateStoredInfoType") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1950,7 +1950,7 @@ class _DeleteDeidentifyTemplate(DlpServiceRestStub): def 
__hash__(self): return hash("DeleteDeidentifyTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2041,7 +2041,7 @@ class _DeleteDlpJob(DlpServiceRestStub): def __hash__(self): return hash("DeleteDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2121,7 +2121,7 @@ class _DeleteInspectTemplate(DlpServiceRestStub): def __hash__(self): return hash("DeleteInspectTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2211,7 +2211,7 @@ class _DeleteJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("DeleteJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2295,7 +2295,7 @@ class _DeleteStoredInfoType(DlpServiceRestStub): def __hash__(self): return hash("DeleteStoredInfoType") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2385,7 +2385,7 @@ class _FinishDlpJob(DlpServiceRestStub): def __hash__(self): return hash("FinishDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2470,7 +2470,7 @@ class _GetDeidentifyTemplate(DlpServiceRestStub): def __hash__(self): return hash("GetDeidentifyTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, 
message_dict): @@ -2577,7 +2577,7 @@ class _GetDlpJob(DlpServiceRestStub): def __hash__(self): return hash("GetDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2669,7 +2669,7 @@ class _GetInspectTemplate(DlpServiceRestStub): def __hash__(self): return hash("GetInspectTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2778,7 +2778,7 @@ class _GetJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("GetJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2876,7 +2876,7 @@ class _GetStoredInfoType(DlpServiceRestStub): def __hash__(self): return hash("GetStoredInfoType") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2981,7 +2981,7 @@ class _HybridInspectDlpJob(DlpServiceRestStub): def __hash__(self): return hash("HybridInspectDlpJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3082,7 +3082,7 @@ class _HybridInspectJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("HybridInspectJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3275,7 +3275,7 @@ class _ListDeidentifyTemplates(DlpServiceRestStub): def __hash__(self): return hash("ListDeidentifyTemplates") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3379,7 +3379,7 @@ class _ListDlpJobs(DlpServiceRestStub): def __hash__(self): return hash("ListDlpJobs") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3558,7 +3558,7 @@ class _ListInspectTemplates(DlpServiceRestStub): def __hash__(self): return hash("ListInspectTemplates") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3662,7 +3662,7 @@ class _ListJobTriggers(DlpServiceRestStub): def __hash__(self): return hash("ListJobTriggers") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3758,7 +3758,7 @@ class _ListStoredInfoTypes(DlpServiceRestStub): def __hash__(self): return hash("ListStoredInfoTypes") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3954,7 +3954,7 @@ class _ReidentifyContent(DlpServiceRestStub): def __hash__(self): return hash("ReidentifyContent") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4056,7 +4056,7 @@ class _UpdateDeidentifyTemplate(DlpServiceRestStub): def __hash__(self): return hash("UpdateDeidentifyTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4176,7 +4176,7 @@ class _UpdateInspectTemplate(DlpServiceRestStub): def __hash__(self): 
return hash("UpdateInspectTemplate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4297,7 +4297,7 @@ class _UpdateJobTrigger(DlpServiceRestStub): def __hash__(self): return hash("UpdateJobTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4408,7 +4408,7 @@ class _UpdateStoredInfoType(DlpServiceRestStub): def __hash__(self): return hash("UpdateStoredInfoType") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): diff --git a/google/cloud/dlp_v2/types/dlp.py b/google/cloud/dlp_v2/types/dlp.py index 0a85ce76..ebf542ec 100644 --- a/google/cloud/dlp_v2/types/dlp.py +++ b/google/cloud/dlp_v2/types/dlp.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import duration_pb2 # type: ignore diff --git a/google/cloud/dlp_v2/types/storage.py b/google/cloud/dlp_v2/types/storage.py index 15d8c78e..745d30c9 100644 --- a/google/cloud/dlp_v2/types/storage.py +++ b/google/cloud/dlp_v2/types/storage.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc deleted file mode 100644 index 76798ec2..00000000 --- a/owl-bot-staging/v2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/dlp/__init__.py - google/cloud/dlp/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v2/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in deleted file mode 100644 index 148f6bf3..00000000 --- a/owl-bot-staging/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/dlp *.py -recursive-include google/cloud/dlp_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst deleted file mode 100644 index cf97c2e7..00000000 --- a/owl-bot-staging/v2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Dlp API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Dlp API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py deleted file mode 100644 index cf2f570a..00000000 --- a/owl-bot-staging/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-dlp documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-dlp" -copyright = u"2022, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dlp-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-dlp.tex", - u"google-cloud-dlp Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-dlp", - u"Google Cloud Dlp Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-dlp", - u"google-cloud-dlp Documentation", - author, - "google-cloud-dlp", - "GAPIC library for Google Cloud Dlp API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. 
-# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst deleted file mode 100644 index 914da512..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DlpService ----------------------------- - -.. automodule:: google.cloud.dlp_v2.services.dlp_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v2/docs/dlp_v2/services.rst b/owl-bot-staging/v2/docs/dlp_v2/services.rst deleted file mode 100644 index 864a8c83..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Dlp v2 API -==================================== -.. 
toctree:: - :maxdepth: 2 - - dlp_service diff --git a/owl-bot-staging/v2/docs/dlp_v2/types.rst b/owl-bot-staging/v2/docs/dlp_v2/types.rst deleted file mode 100644 index 5470b717..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Dlp v2 API -================================= - -.. automodule:: google.cloud.dlp_v2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst deleted file mode 100644 index d119451a..00000000 --- a/owl-bot-staging/v2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - dlp_v2/services - dlp_v2/types diff --git a/owl-bot-staging/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/v2/google/cloud/dlp/__init__.py deleted file mode 100644 index 3c1a800c..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/__init__.py +++ /dev/null @@ -1,395 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.dlp import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient - -from google.cloud.dlp_v2.types.dlp import Action -from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails -from google.cloud.dlp_v2.types.dlp import BoundingBox -from google.cloud.dlp_v2.types.dlp import BucketingConfig -from google.cloud.dlp_v2.types.dlp import ByteContentItem -from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig -from google.cloud.dlp_v2.types.dlp import CharsToIgnore -from google.cloud.dlp_v2.types.dlp import Color -from google.cloud.dlp_v2.types.dlp import Container -from google.cloud.dlp_v2.types.dlp import ContentItem -from google.cloud.dlp_v2.types.dlp import ContentLocation -from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig -from google.cloud.dlp_v2.types.dlp import CryptoHashConfig -from google.cloud.dlp_v2.types.dlp import CryptoKey -from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig -from google.cloud.dlp_v2.types.dlp import DataProfileAction -from google.cloud.dlp_v2.types.dlp import DataProfileConfigSnapshot -from google.cloud.dlp_v2.types.dlp import DataProfileJobConfig -from google.cloud.dlp_v2.types.dlp import DataProfileLocation -from google.cloud.dlp_v2.types.dlp import DataProfilePubSubCondition -from 
google.cloud.dlp_v2.types.dlp import DataProfilePubSubMessage -from google.cloud.dlp_v2.types.dlp import DataRiskLevel -from google.cloud.dlp_v2.types.dlp import DateShiftConfig -from google.cloud.dlp_v2.types.dlp import DateTime -from google.cloud.dlp_v2.types.dlp import DeidentifyConfig -from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate -from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest -from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import DlpJob -from google.cloud.dlp_v2.types.dlp import DocumentLocation -from google.cloud.dlp_v2.types.dlp import Error -from google.cloud.dlp_v2.types.dlp import ExcludeByHotword -from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes -from google.cloud.dlp_v2.types.dlp import ExclusionRule -from google.cloud.dlp_v2.types.dlp import FieldTransformation -from google.cloud.dlp_v2.types.dlp import Finding -from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest -from google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig -from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest -from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import HybridContentItem -from google.cloud.dlp_v2.types.dlp import HybridFindingDetails -from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest -from google.cloud.dlp_v2.types.dlp import 
HybridInspectJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import HybridInspectResponse -from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics -from google.cloud.dlp_v2.types.dlp import ImageLocation -from google.cloud.dlp_v2.types.dlp import ImageTransformations -from google.cloud.dlp_v2.types.dlp import InfoTypeCategory -from google.cloud.dlp_v2.types.dlp import InfoTypeDescription -from google.cloud.dlp_v2.types.dlp import InfoTypeStats -from google.cloud.dlp_v2.types.dlp import InfoTypeSummary -from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations -from google.cloud.dlp_v2.types.dlp import InspectConfig -from google.cloud.dlp_v2.types.dlp import InspectContentRequest -from google.cloud.dlp_v2.types.dlp import InspectContentResponse -from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails -from google.cloud.dlp_v2.types.dlp import InspectionRule -from google.cloud.dlp_v2.types.dlp import InspectionRuleSet -from google.cloud.dlp_v2.types.dlp import InspectJobConfig -from google.cloud.dlp_v2.types.dlp import InspectResult -from google.cloud.dlp_v2.types.dlp import InspectTemplate -from google.cloud.dlp_v2.types.dlp import JobTrigger -from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse -from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest -from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse -from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse -from google.cloud.dlp_v2.types.dlp import 
ListJobTriggersRequest -from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse -from google.cloud.dlp_v2.types.dlp import Location -from google.cloud.dlp_v2.types.dlp import Manual -from google.cloud.dlp_v2.types.dlp import MetadataLocation -from google.cloud.dlp_v2.types.dlp import OtherInfoTypeSummary -from google.cloud.dlp_v2.types.dlp import OutputStorageConfig -from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation -from google.cloud.dlp_v2.types.dlp import PrivacyMetric -from google.cloud.dlp_v2.types.dlp import ProfileStatus -from google.cloud.dlp_v2.types.dlp import QuasiId -from google.cloud.dlp_v2.types.dlp import QuoteInfo -from google.cloud.dlp_v2.types.dlp import Range -from google.cloud.dlp_v2.types.dlp import RecordCondition -from google.cloud.dlp_v2.types.dlp import RecordLocation -from google.cloud.dlp_v2.types.dlp import RecordSuppression -from google.cloud.dlp_v2.types.dlp import RecordTransformation -from google.cloud.dlp_v2.types.dlp import RecordTransformations -from google.cloud.dlp_v2.types.dlp import RedactConfig -from google.cloud.dlp_v2.types.dlp import RedactImageRequest -from google.cloud.dlp_v2.types.dlp import RedactImageResponse -from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import ReplaceDictionaryConfig -from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig -from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig -from google.cloud.dlp_v2.types.dlp import Schedule -from google.cloud.dlp_v2.types.dlp import StatisticalTable -from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel -from google.cloud.dlp_v2.types.dlp import StoredInfoType -from 
google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion -from google.cloud.dlp_v2.types.dlp import Table -from google.cloud.dlp_v2.types.dlp import TableDataProfile -from google.cloud.dlp_v2.types.dlp import TableLocation -from google.cloud.dlp_v2.types.dlp import TimePartConfig -from google.cloud.dlp_v2.types.dlp import TransformationConfig -from google.cloud.dlp_v2.types.dlp import TransformationDescription -from google.cloud.dlp_v2.types.dlp import TransformationDetails -from google.cloud.dlp_v2.types.dlp import TransformationDetailsStorageConfig -from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling -from google.cloud.dlp_v2.types.dlp import TransformationLocation -from google.cloud.dlp_v2.types.dlp import TransformationOverview -from google.cloud.dlp_v2.types.dlp import TransformationResultStatus -from google.cloud.dlp_v2.types.dlp import TransformationSummary -from google.cloud.dlp_v2.types.dlp import TransientCryptoKey -from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey -from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import Value -from google.cloud.dlp_v2.types.dlp import ValueFrequency -from google.cloud.dlp_v2.types.dlp import VersionDescription -from google.cloud.dlp_v2.types.dlp import ContentOption -from google.cloud.dlp_v2.types.dlp import DlpJobType -from google.cloud.dlp_v2.types.dlp import EncryptionStatus -from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy -from google.cloud.dlp_v2.types.dlp import MatchingType -from google.cloud.dlp_v2.types.dlp import MetadataType -from google.cloud.dlp_v2.types.dlp import 
RelationalOperator -from google.cloud.dlp_v2.types.dlp import ResourceVisibility -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState -from google.cloud.dlp_v2.types.dlp import TransformationContainerType -from google.cloud.dlp_v2.types.dlp import TransformationResultStatusType -from google.cloud.dlp_v2.types.dlp import TransformationType -from google.cloud.dlp_v2.types.storage import BigQueryField -from google.cloud.dlp_v2.types.storage import BigQueryKey -from google.cloud.dlp_v2.types.storage import BigQueryOptions -from google.cloud.dlp_v2.types.storage import BigQueryTable -from google.cloud.dlp_v2.types.storage import CloudStorageFileSet -from google.cloud.dlp_v2.types.storage import CloudStorageOptions -from google.cloud.dlp_v2.types.storage import CloudStoragePath -from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet -from google.cloud.dlp_v2.types.storage import CustomInfoType -from google.cloud.dlp_v2.types.storage import DatastoreKey -from google.cloud.dlp_v2.types.storage import DatastoreOptions -from google.cloud.dlp_v2.types.storage import EntityId -from google.cloud.dlp_v2.types.storage import FieldId -from google.cloud.dlp_v2.types.storage import HybridOptions -from google.cloud.dlp_v2.types.storage import InfoType -from google.cloud.dlp_v2.types.storage import Key -from google.cloud.dlp_v2.types.storage import KindExpression -from google.cloud.dlp_v2.types.storage import PartitionId -from google.cloud.dlp_v2.types.storage import RecordKey -from google.cloud.dlp_v2.types.storage import SensitivityScore -from google.cloud.dlp_v2.types.storage import StorageConfig -from google.cloud.dlp_v2.types.storage import StoredType -from google.cloud.dlp_v2.types.storage import TableOptions -from google.cloud.dlp_v2.types.storage import FileType -from google.cloud.dlp_v2.types.storage import Likelihood - -__all__ = ('DlpServiceClient', - 'DlpServiceAsyncClient', - 'Action', - 'ActivateJobTriggerRequest', - 
'AnalyzeDataSourceRiskDetails', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'Color', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateDeidentifyTemplateRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DataProfileAction', - 'DataProfileConfigSnapshot', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - 'DataRiskLevel', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyTemplate', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDlpJobRequest', - 'DeleteInspectTemplateRequest', - 'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeByHotword', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetDeidentifyTemplateRequest', - 'GetDlpJobRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetStoredInfoTypeRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'ImageTransformations', - 'InfoTypeCategory', - 'InfoTypeDescription', - 'InfoTypeStats', - 'InfoTypeSummary', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListDeidentifyTemplatesRequest', - 
'ListDeidentifyTemplatesResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OtherInfoTypeSummary', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'ProfileStatus', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformation', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'ReplaceDictionaryConfig', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableDataProfile', - 'TableLocation', - 'TimePartConfig', - 'TransformationConfig', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationDetailsStorageConfig', - 'TransformationErrorHandling', - 'TransformationLocation', - 'TransformationOverview', - 'TransformationResultStatus', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateDeidentifyTemplateRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 'ValueFrequency', - 'VersionDescription', - 'ContentOption', - 'DlpJobType', - 'EncryptionStatus', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'RelationalOperator', - 'ResourceVisibility', - 'StoredInfoTypeState', - 'TransformationContainerType', - 'TransformationResultStatusType', - 'TransformationType', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 
'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'SensitivityScore', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp/py.typed b/owl-bot-staging/v2/google/cloud/dlp/py.typed deleted file mode 100644 index 23d89ef3..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dlp package uses inline types. 
diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py deleted file mode 100644 index 8397a3ad..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py +++ /dev/null @@ -1,396 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.dlp_v2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.dlp_service import DlpServiceClient -from .services.dlp_service import DlpServiceAsyncClient - -from .types.dlp import Action -from .types.dlp import ActivateJobTriggerRequest -from .types.dlp import AnalyzeDataSourceRiskDetails -from .types.dlp import BoundingBox -from .types.dlp import BucketingConfig -from .types.dlp import ByteContentItem -from .types.dlp import CancelDlpJobRequest -from .types.dlp import CharacterMaskConfig -from .types.dlp import CharsToIgnore -from .types.dlp import Color -from .types.dlp import Container -from .types.dlp import ContentItem -from .types.dlp import ContentLocation -from .types.dlp import CreateDeidentifyTemplateRequest -from .types.dlp import CreateDlpJobRequest -from .types.dlp import CreateInspectTemplateRequest -from .types.dlp import CreateJobTriggerRequest -from .types.dlp import CreateStoredInfoTypeRequest -from .types.dlp import CryptoDeterministicConfig -from .types.dlp import CryptoHashConfig -from .types.dlp import CryptoKey -from 
.types.dlp import CryptoReplaceFfxFpeConfig -from .types.dlp import DataProfileAction -from .types.dlp import DataProfileConfigSnapshot -from .types.dlp import DataProfileJobConfig -from .types.dlp import DataProfileLocation -from .types.dlp import DataProfilePubSubCondition -from .types.dlp import DataProfilePubSubMessage -from .types.dlp import DataRiskLevel -from .types.dlp import DateShiftConfig -from .types.dlp import DateTime -from .types.dlp import DeidentifyConfig -from .types.dlp import DeidentifyContentRequest -from .types.dlp import DeidentifyContentResponse -from .types.dlp import DeidentifyTemplate -from .types.dlp import DeleteDeidentifyTemplateRequest -from .types.dlp import DeleteDlpJobRequest -from .types.dlp import DeleteInspectTemplateRequest -from .types.dlp import DeleteJobTriggerRequest -from .types.dlp import DeleteStoredInfoTypeRequest -from .types.dlp import DlpJob -from .types.dlp import DocumentLocation -from .types.dlp import Error -from .types.dlp import ExcludeByHotword -from .types.dlp import ExcludeInfoTypes -from .types.dlp import ExclusionRule -from .types.dlp import FieldTransformation -from .types.dlp import Finding -from .types.dlp import FinishDlpJobRequest -from .types.dlp import FixedSizeBucketingConfig -from .types.dlp import GetDeidentifyTemplateRequest -from .types.dlp import GetDlpJobRequest -from .types.dlp import GetInspectTemplateRequest -from .types.dlp import GetJobTriggerRequest -from .types.dlp import GetStoredInfoTypeRequest -from .types.dlp import HybridContentItem -from .types.dlp import HybridFindingDetails -from .types.dlp import HybridInspectDlpJobRequest -from .types.dlp import HybridInspectJobTriggerRequest -from .types.dlp import HybridInspectResponse -from .types.dlp import HybridInspectStatistics -from .types.dlp import ImageLocation -from .types.dlp import ImageTransformations -from .types.dlp import InfoTypeCategory -from .types.dlp import InfoTypeDescription -from .types.dlp import InfoTypeStats -from 
.types.dlp import InfoTypeSummary -from .types.dlp import InfoTypeTransformations -from .types.dlp import InspectConfig -from .types.dlp import InspectContentRequest -from .types.dlp import InspectContentResponse -from .types.dlp import InspectDataSourceDetails -from .types.dlp import InspectionRule -from .types.dlp import InspectionRuleSet -from .types.dlp import InspectJobConfig -from .types.dlp import InspectResult -from .types.dlp import InspectTemplate -from .types.dlp import JobTrigger -from .types.dlp import KmsWrappedCryptoKey -from .types.dlp import LargeCustomDictionaryConfig -from .types.dlp import LargeCustomDictionaryStats -from .types.dlp import ListDeidentifyTemplatesRequest -from .types.dlp import ListDeidentifyTemplatesResponse -from .types.dlp import ListDlpJobsRequest -from .types.dlp import ListDlpJobsResponse -from .types.dlp import ListInfoTypesRequest -from .types.dlp import ListInfoTypesResponse -from .types.dlp import ListInspectTemplatesRequest -from .types.dlp import ListInspectTemplatesResponse -from .types.dlp import ListJobTriggersRequest -from .types.dlp import ListJobTriggersResponse -from .types.dlp import ListStoredInfoTypesRequest -from .types.dlp import ListStoredInfoTypesResponse -from .types.dlp import Location -from .types.dlp import Manual -from .types.dlp import MetadataLocation -from .types.dlp import OtherInfoTypeSummary -from .types.dlp import OutputStorageConfig -from .types.dlp import PrimitiveTransformation -from .types.dlp import PrivacyMetric -from .types.dlp import ProfileStatus -from .types.dlp import QuasiId -from .types.dlp import QuoteInfo -from .types.dlp import Range -from .types.dlp import RecordCondition -from .types.dlp import RecordLocation -from .types.dlp import RecordSuppression -from .types.dlp import RecordTransformation -from .types.dlp import RecordTransformations -from .types.dlp import RedactConfig -from .types.dlp import RedactImageRequest -from .types.dlp import RedactImageResponse -from 
.types.dlp import ReidentifyContentRequest -from .types.dlp import ReidentifyContentResponse -from .types.dlp import ReplaceDictionaryConfig -from .types.dlp import ReplaceValueConfig -from .types.dlp import ReplaceWithInfoTypeConfig -from .types.dlp import RiskAnalysisJobConfig -from .types.dlp import Schedule -from .types.dlp import StatisticalTable -from .types.dlp import StorageMetadataLabel -from .types.dlp import StoredInfoType -from .types.dlp import StoredInfoTypeConfig -from .types.dlp import StoredInfoTypeStats -from .types.dlp import StoredInfoTypeVersion -from .types.dlp import Table -from .types.dlp import TableDataProfile -from .types.dlp import TableLocation -from .types.dlp import TimePartConfig -from .types.dlp import TransformationConfig -from .types.dlp import TransformationDescription -from .types.dlp import TransformationDetails -from .types.dlp import TransformationDetailsStorageConfig -from .types.dlp import TransformationErrorHandling -from .types.dlp import TransformationLocation -from .types.dlp import TransformationOverview -from .types.dlp import TransformationResultStatus -from .types.dlp import TransformationSummary -from .types.dlp import TransientCryptoKey -from .types.dlp import UnwrappedCryptoKey -from .types.dlp import UpdateDeidentifyTemplateRequest -from .types.dlp import UpdateInspectTemplateRequest -from .types.dlp import UpdateJobTriggerRequest -from .types.dlp import UpdateStoredInfoTypeRequest -from .types.dlp import Value -from .types.dlp import ValueFrequency -from .types.dlp import VersionDescription -from .types.dlp import ContentOption -from .types.dlp import DlpJobType -from .types.dlp import EncryptionStatus -from .types.dlp import InfoTypeSupportedBy -from .types.dlp import MatchingType -from .types.dlp import MetadataType -from .types.dlp import RelationalOperator -from .types.dlp import ResourceVisibility -from .types.dlp import StoredInfoTypeState -from .types.dlp import TransformationContainerType -from 
.types.dlp import TransformationResultStatusType -from .types.dlp import TransformationType -from .types.storage import BigQueryField -from .types.storage import BigQueryKey -from .types.storage import BigQueryOptions -from .types.storage import BigQueryTable -from .types.storage import CloudStorageFileSet -from .types.storage import CloudStorageOptions -from .types.storage import CloudStoragePath -from .types.storage import CloudStorageRegexFileSet -from .types.storage import CustomInfoType -from .types.storage import DatastoreKey -from .types.storage import DatastoreOptions -from .types.storage import EntityId -from .types.storage import FieldId -from .types.storage import HybridOptions -from .types.storage import InfoType -from .types.storage import Key -from .types.storage import KindExpression -from .types.storage import PartitionId -from .types.storage import RecordKey -from .types.storage import SensitivityScore -from .types.storage import StorageConfig -from .types.storage import StoredType -from .types.storage import TableOptions -from .types.storage import FileType -from .types.storage import Likelihood - -__all__ = ( - 'DlpServiceAsyncClient', -'Action', -'ActivateJobTriggerRequest', -'AnalyzeDataSourceRiskDetails', -'BigQueryField', -'BigQueryKey', -'BigQueryOptions', -'BigQueryTable', -'BoundingBox', -'BucketingConfig', -'ByteContentItem', -'CancelDlpJobRequest', -'CharacterMaskConfig', -'CharsToIgnore', -'CloudStorageFileSet', -'CloudStorageOptions', -'CloudStoragePath', -'CloudStorageRegexFileSet', -'Color', -'Container', -'ContentItem', -'ContentLocation', -'ContentOption', -'CreateDeidentifyTemplateRequest', -'CreateDlpJobRequest', -'CreateInspectTemplateRequest', -'CreateJobTriggerRequest', -'CreateStoredInfoTypeRequest', -'CryptoDeterministicConfig', -'CryptoHashConfig', -'CryptoKey', -'CryptoReplaceFfxFpeConfig', -'CustomInfoType', -'DataProfileAction', -'DataProfileConfigSnapshot', -'DataProfileJobConfig', -'DataProfileLocation', 
-'DataProfilePubSubCondition', -'DataProfilePubSubMessage', -'DataRiskLevel', -'DatastoreKey', -'DatastoreOptions', -'DateShiftConfig', -'DateTime', -'DeidentifyConfig', -'DeidentifyContentRequest', -'DeidentifyContentResponse', -'DeidentifyTemplate', -'DeleteDeidentifyTemplateRequest', -'DeleteDlpJobRequest', -'DeleteInspectTemplateRequest', -'DeleteJobTriggerRequest', -'DeleteStoredInfoTypeRequest', -'DlpJob', -'DlpJobType', -'DlpServiceClient', -'DocumentLocation', -'EncryptionStatus', -'EntityId', -'Error', -'ExcludeByHotword', -'ExcludeInfoTypes', -'ExclusionRule', -'FieldId', -'FieldTransformation', -'FileType', -'Finding', -'FinishDlpJobRequest', -'FixedSizeBucketingConfig', -'GetDeidentifyTemplateRequest', -'GetDlpJobRequest', -'GetInspectTemplateRequest', -'GetJobTriggerRequest', -'GetStoredInfoTypeRequest', -'HybridContentItem', -'HybridFindingDetails', -'HybridInspectDlpJobRequest', -'HybridInspectJobTriggerRequest', -'HybridInspectResponse', -'HybridInspectStatistics', -'HybridOptions', -'ImageLocation', -'ImageTransformations', -'InfoType', -'InfoTypeCategory', -'InfoTypeDescription', -'InfoTypeStats', -'InfoTypeSummary', -'InfoTypeSupportedBy', -'InfoTypeTransformations', -'InspectConfig', -'InspectContentRequest', -'InspectContentResponse', -'InspectDataSourceDetails', -'InspectJobConfig', -'InspectResult', -'InspectTemplate', -'InspectionRule', -'InspectionRuleSet', -'JobTrigger', -'Key', -'KindExpression', -'KmsWrappedCryptoKey', -'LargeCustomDictionaryConfig', -'LargeCustomDictionaryStats', -'Likelihood', -'ListDeidentifyTemplatesRequest', -'ListDeidentifyTemplatesResponse', -'ListDlpJobsRequest', -'ListDlpJobsResponse', -'ListInfoTypesRequest', -'ListInfoTypesResponse', -'ListInspectTemplatesRequest', -'ListInspectTemplatesResponse', -'ListJobTriggersRequest', -'ListJobTriggersResponse', -'ListStoredInfoTypesRequest', -'ListStoredInfoTypesResponse', -'Location', -'Manual', -'MatchingType', -'MetadataLocation', -'MetadataType', 
-'OtherInfoTypeSummary', -'OutputStorageConfig', -'PartitionId', -'PrimitiveTransformation', -'PrivacyMetric', -'ProfileStatus', -'QuasiId', -'QuoteInfo', -'Range', -'RecordCondition', -'RecordKey', -'RecordLocation', -'RecordSuppression', -'RecordTransformation', -'RecordTransformations', -'RedactConfig', -'RedactImageRequest', -'RedactImageResponse', -'ReidentifyContentRequest', -'ReidentifyContentResponse', -'RelationalOperator', -'ReplaceDictionaryConfig', -'ReplaceValueConfig', -'ReplaceWithInfoTypeConfig', -'ResourceVisibility', -'RiskAnalysisJobConfig', -'Schedule', -'SensitivityScore', -'StatisticalTable', -'StorageConfig', -'StorageMetadataLabel', -'StoredInfoType', -'StoredInfoTypeConfig', -'StoredInfoTypeState', -'StoredInfoTypeStats', -'StoredInfoTypeVersion', -'StoredType', -'Table', -'TableDataProfile', -'TableLocation', -'TableOptions', -'TimePartConfig', -'TransformationConfig', -'TransformationContainerType', -'TransformationDescription', -'TransformationDetails', -'TransformationDetailsStorageConfig', -'TransformationErrorHandling', -'TransformationLocation', -'TransformationOverview', -'TransformationResultStatus', -'TransformationResultStatusType', -'TransformationSummary', -'TransformationType', -'TransientCryptoKey', -'UnwrappedCryptoKey', -'UpdateDeidentifyTemplateRequest', -'UpdateInspectTemplateRequest', -'UpdateJobTriggerRequest', -'UpdateStoredInfoTypeRequest', -'Value', -'ValueFrequency', -'VersionDescription', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json deleted file mode 100644 index 634002d4..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json +++ /dev/null @@ -1,538 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.dlp_v2", - "protoPackage": "google.privacy.dlp.v2", - "schema": "1.0", - "services": { - 
"DlpService": { - "clients": { - "grpc": { - "libraryClient": "DlpServiceClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - 
}, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DlpServiceAsyncClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - 
"GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - }, - "rest": { - "libraryClient": "DlpServiceClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - 
}, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- 
a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed deleted file mode 100644 index 23d89ef3..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py deleted file mode 100644 index e8e1c384..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py deleted file mode 100644 index aa9c062a..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DlpServiceClient -from .async_client import DlpServiceAsyncClient - -__all__ = ( - 'DlpServiceClient', - 'DlpServiceAsyncClient', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py deleted file mode 100644 index 041479c1..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ /dev/null @@ -1,4143 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dlp_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .client import DlpServiceClient - - -class DlpServiceAsyncClient: - """The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. 
- """ - - _client: DlpServiceClient - - DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT - - deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) - parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) - dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) - parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) - dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) - parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) - finding_path = staticmethod(DlpServiceClient.finding_path) - parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) - inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) - parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) - job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) - parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) - stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) - parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) - common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DlpServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DlpServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DlpServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DlpServiceClient.common_project_path) - parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) - common_location_path = 
staticmethod(DlpServiceClient.common_location_path) - parse_common_location_path = staticmethod(DlpServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DlpServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DlpServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(DlpServiceClient).get_transport_class, type(DlpServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DlpServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dlp service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.DlpServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DlpServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def inspect_content(self, - request: Optional[Union[dlp.InspectContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectContentResponse: - r"""Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. 
- For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = await client.inspect_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.inspect_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def redact_image(self, - request: Optional[Union[dlp.RedactImageRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = await client.redact_image(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]]): - The request object. Request to search for potentially - sensitive info in an image and redact it by covering it - with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.redact_image, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def deidentify_content(self, - request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = await client.deidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]]): - The request object. Request to de-identify a - ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.deidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def reidentify_content(self, - request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.reidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]]): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying an item. - """ - # Create or coerce a protobuf request object. - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_info_types(self, - request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = await client.list_info_types(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]]): - The request object. Request for the list of infoTypes. - parent (:class:`str`): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_inspect_template(self, - request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]]): - The request object. Request message for - CreateInspectTemplate. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - Required. The InspectTemplate to - create. - - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, inspect_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_template is not None: - request.inspect_template = inspect_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_inspect_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_inspect_template(self, - request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]]): - The request object. Request message for - UpdateInspectTemplate. - name (:class:`str`): - Required. Resource name of organization and - inspectTemplate to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - New InspectTemplate value. - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, inspect_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_inspect_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_inspect_template(self, - request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]]): - The request object. Request message for - GetInspectTemplate. - name (:class:`str`): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_inspect_templates(self, - request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInspectTemplatesAsyncPager: - r"""Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]]): - The request object. Request message for - ListInspectTemplates. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: - Response message for - ListInspectTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListInspectTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_inspect_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListInspectTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_inspect_template(self, - request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_inspect_template(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]]): - The request object. Request message for - DeleteInspectTemplate. - name (:class:`str`): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_deidentify_template(self, - request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]]): - The request object. Request message for - CreateDeidentifyTemplate. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - Required. The DeidentifyTemplate to - create. - - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, deidentify_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if deidentify_template is not None: - request.deidentify_template = deidentify_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_deidentify_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_deidentify_template(self, - request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]]): - The request object. Request message for - UpdateDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, deidentify_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_deidentify_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_deidentify_template(self, - request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. 
- See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]]): - The request object. Request message for - GetDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_deidentify_templates(self, - request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDeidentifyTemplatesAsyncPager: - r"""Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]]): - The request object. Request message for - ListDeidentifyTemplates. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: - Response message for - ListDeidentifyTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListDeidentifyTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_deidentify_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListDeidentifyTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_deidentify_template(self, - request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_deidentify_template(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]]): - The request object. Request message for - DeleteDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_job_trigger(self, - request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = await client.create_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]]): - The request object. Request message for - CreateJobTrigger. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): - Required. The JobTrigger to create. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_trigger]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if job_trigger is not None: - request.job_trigger = job_trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_job_trigger(self, - request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.update_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]]): - The request object. Request message for - UpdateJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): - New JobTrigger value. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, job_trigger, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def hybrid_inspect_job_trigger(self, - request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.HybridInspectJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_trigger(self, - request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]]): - The request object. Request message for GetJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_job_triggers(self, - request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTriggersAsyncPager: - r"""Lists job triggers. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]]): - The request object. Request message for ListJobTriggers. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager: - Response message for ListJobTriggers. - Iterating over this object will yield - results and resolve additional pages - automatically. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListJobTriggersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_job_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobTriggersAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job_trigger(self, - request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job trigger. 
- See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_trigger(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]]): - The request object. Request message for - DeleteJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def activate_job_trigger(self, - request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.activate_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]]): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - request = dlp.ActivateJobTriggerRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.activate_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_dlp_job(self, - request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_job: Optional[dlp.InspectJobConfig] = None, - risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]]): - The request object. Request message for - CreateDlpJobRequest. Used to initiate long running jobs - such as calculating risk metrics or inspecting Google - Cloud Storage. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`): - An inspection job scans a storage - repository for InfoTypes. - - This corresponds to the ``inspect_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`): - A risk analysis job calculates - re-identification risk metrics for a - BigQuery table. - - This corresponds to the ``risk_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, inspect_job, risk_job]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_job is not None: - request.inspect_job = inspect_job - if risk_job is not None: - request.risk_job = risk_job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_dlp_jobs(self, - request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDlpJobsAsyncPager: - r"""Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]]): - The request object. The request message for listing DLP - jobs. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: - The response message for listing DLP - jobs. - Iterating over this object will yield - results and resolve additional pages - automatically. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListDlpJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_dlp_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDlpJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_dlp_job(self, - request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]]): - The request object. The request message for - [DlpJobs.GetDlpJob][]. - name (:class:`str`): - Required. The name of the DlpJob - resource. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_dlp_job(self, - request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. 
- See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]]): - The request object. The request message for deleting a - DLP job. - name (:class:`str`): - Required. The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def cancel_dlp_job(self, - request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]]): - The request object. The request message for canceling a - DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = dlp.CancelDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_stored_info_type(self, - request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]]): - The request object. Request message for - CreateStoredInfoType. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): - Required. Configuration of the - storedInfoType to create. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, config]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_stored_info_type, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_stored_info_type(self, - request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.update_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]]): - The request object. Request message for - UpdateStoredInfoType. - name (:class:`str`): - Required. Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, config, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_stored_info_type, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_stored_info_type(self, - request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. 
- See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]]): - The request object. Request message for - GetStoredInfoType. - name (:class:`str`): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_stored_info_types(self, - request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListStoredInfoTypesAsyncPager: - r"""Lists stored infoTypes. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]]): - The request object. Request message for - ListStoredInfoTypes. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager: - Response message for - ListStoredInfoTypes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListStoredInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_stored_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListStoredInfoTypesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_stored_info_type(self, - request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - await client.delete_stored_info_type(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]]): - The request object. Request message for - DeleteStoredInfoType. - name (:class:`str`): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def hybrid_inspect_dlp_job(self, - request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.HybridInspectDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def finish_dlp_job(self, - request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.finish_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]]): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.finish_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DlpServiceAsyncClient", -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py deleted file mode 100644 index 9d90a824..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py +++ /dev/null @@ -1,4269 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dlp_v2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DlpServiceGrpcTransport -from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .transports.rest import DlpServiceRestTransport - - -class DlpServiceClientMeta(type): - """Metaclass for the DlpService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] - _transport_registry["grpc"] = DlpServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport - _transport_registry["rest"] = DlpServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DlpServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DlpServiceClient(metaclass=DlpServiceClientMeta): - """The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dlp.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DlpServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def deidentify_template_path(organization: str,deidentify_template: str,) -> str: - """Returns a fully-qualified deidentify_template string.""" - return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - - @staticmethod - def parse_deidentify_template_path(path: str) -> Dict[str,str]: - """Parses a deidentify_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_content_path(project: str,) -> str: - """Returns a fully-qualified dlp_content string.""" - return "projects/{project}/dlpContent".format(project=project, ) - - @staticmethod - def parse_dlp_content_path(path: str) -> Dict[str,str]: - """Parses a dlp_content path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpContent$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_job_path(project: str,dlp_job: str,) -> str: - """Returns a fully-qualified dlp_job string.""" - return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - - @staticmethod - def parse_dlp_job_path(path: str) -> Dict[str,str]: - """Parses a dlp_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def finding_path(project: str,location: str,finding: str,) -> str: - """Returns a fully-qualified finding string.""" - return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - - @staticmethod - def parse_finding_path(path: str) -> Dict[str,str]: - """Parses a finding path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", path) - return m.groupdict() if m else {} - - 
@staticmethod - def inspect_template_path(organization: str,inspect_template: str,) -> str: - """Returns a fully-qualified inspect_template string.""" - return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - - @staticmethod - def parse_inspect_template_path(path: str) -> Dict[str,str]: - """Parses a inspect_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_trigger_path(project: str,job_trigger: str,) -> str: - """Returns a fully-qualified job_trigger string.""" - return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - - @staticmethod - def parse_job_trigger_path(path: str) -> Dict[str,str]: - """Parses a job_trigger path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def stored_info_type_path(organization: str,stored_info_type: str,) -> str: - """Returns a fully-qualified stored_info_type string.""" - return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - - @staticmethod - def parse_stored_info_type_path(path: str) -> Dict[str,str]: - """Parses a stored_info_type path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/storedInfoTypes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = 
re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. 
- client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DlpServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dlp service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, DlpServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, DlpServiceTransport): - # transport is a DlpServiceTransport instance. 
- if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def inspect_content(self, - request: Optional[Union[dlp.InspectContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectContentResponse: - r"""Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = client.inspect_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.InspectContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.InspectContentRequest): - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.inspect_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def redact_image(self, - request: Optional[Union[dlp.RedactImageRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = client.redact_image(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]): - The request object. Request to search for potentially - sensitive info in an image and redact it by covering it - with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.RedactImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.RedactImageRequest): - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.redact_image] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def deidentify_content(self, - request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = client.deidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]): - The request object. Request to de-identify a - ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeidentifyContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeidentifyContentRequest): - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.deidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def reidentify_content(self, - request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = client.reidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying an item. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ReidentifyContentRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ReidentifyContentRequest): - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_info_types(self, - request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = client.list_info_types(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]): - The request object. Request for the list of infoTypes. - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListInfoTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, dlp.ListInfoTypesRequest): - request = dlp.ListInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_info_types] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_inspect_template(self, - request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]): - The request object. Request message for - CreateInspectTemplate. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - Required. The InspectTemplate to - create. - - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, inspect_template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateInspectTemplateRequest): - request = dlp.CreateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_template is not None: - request.inspect_template = inspect_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_inspect_template(self, - request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]): - The request object. Request message for - UpdateInspectTemplate. - name (str): - Required. Resource name of organization and - inspectTemplate to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - New InspectTemplate value. - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, inspect_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateInspectTemplateRequest): - request = dlp.UpdateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_inspect_template(self, - request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]): - The request object. Request message for - GetInspectTemplate. - name (str): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetInspectTemplateRequest): - request = dlp.GetInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_inspect_templates(self, - request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInspectTemplatesPager: - r"""Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]): - The request object. Request message for - ListInspectTemplates. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager: - Response message for - ListInspectTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListInspectTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListInspectTemplatesRequest): - request = dlp.ListInspectTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInspectTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_inspect_template(self, - request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_inspect_template(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]): - The request object. Request message for - DeleteInspectTemplate. - name (str): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteInspectTemplateRequest): - request = dlp.DeleteInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_deidentify_template(self, - request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]): - The request object. Request message for - CreateDeidentifyTemplate. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Required. The DeidentifyTemplate to - create. - - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, deidentify_template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): - request = dlp.CreateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if deidentify_template is not None: - request.deidentify_template = deidentify_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_deidentify_template(self, - request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]): - The request object. Request message for - UpdateDeidentifyTemplate. - name (str): - Required. 
Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, deidentify_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): - request = dlp.UpdateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_deidentify_template(self, - request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]): - The request object. Request message for - GetDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetDeidentifyTemplateRequest): - request = dlp.GetDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_deidentify_templates(self, - request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDeidentifyTemplatesPager: - r"""Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]): - The request object. Request message for - ListDeidentifyTemplates. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager: - Response message for - ListDeidentifyTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListDeidentifyTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): - request = dlp.ListDeidentifyTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDeidentifyTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_deidentify_template(self, - request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_deidentify_template(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]): - The request object. Request message for - DeleteDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): - request = dlp.DeleteDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_job_trigger(self, - request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = client.create_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]): - The request object. Request message for - CreateJobTrigger. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_trigger]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateJobTriggerRequest): - request = dlp.CreateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_trigger is not None: - request.job_trigger = job_trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_job_trigger(self, - request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.update_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]): - The request object. Request message for - UpdateJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, job_trigger, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateJobTriggerRequest): - request = dlp.UpdateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def hybrid_inspect_job_trigger(self, - request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.HybridInspectJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.HybridInspectJobTriggerRequest): - request = dlp.HybridInspectJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_job_trigger(self, - request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]): - The request object. Request message for GetJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. 
See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetJobTriggerRequest): - request = dlp.GetJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_job_triggers(self, - request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTriggersPager: - r"""Lists job triggers. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]): - The request object. Request message for ListJobTriggers. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: - Response message for ListJobTriggers. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListJobTriggersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListJobTriggersRequest): - request = dlp.ListJobTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobTriggersPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_job_trigger(self, - request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - client.delete_job_trigger(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]): - The request object. Request message for - DeleteJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteJobTriggerRequest): - request = dlp.DeleteJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def activate_job_trigger(self, - request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.activate_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ActivateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ActivateJobTriggerRequest): - request = dlp.ActivateJobTriggerRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def create_dlp_job(self, - request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_job: Optional[dlp.InspectJobConfig] = None, - risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]): - The request object. Request message for - CreateDlpJobRequest. Used to initiate long running jobs - such as calculating risk metrics or inspecting Google - Cloud Storage. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - An inspection job scans a storage - repository for InfoTypes. - - This corresponds to the ``inspect_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - A risk analysis job calculates - re-identification risk metrics for a - BigQuery table. - - This corresponds to the ``risk_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, inspect_job, risk_job]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateDlpJobRequest): - request = dlp.CreateDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_job is not None: - request.inspect_job = inspect_job - if risk_job is not None: - request.risk_job = risk_job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_dlp_jobs(self, - request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDlpJobsPager: - r"""Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]): - The request object. The request message for listing DLP - jobs. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: - The response message for listing DLP - jobs. 
- Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListDlpJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListDlpJobsRequest): - request = dlp.ListDlpJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDlpJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_dlp_job(self, - request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]): - The request object. The request message for - [DlpJobs.GetDlpJob][]. - name (str): - Required. The name of the DlpJob - resource. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetDlpJobRequest): - request = dlp.GetDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_dlp_job(self, - request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - client.delete_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]): - The request object. The request message for deleting a - DLP job. - name (str): - Required. The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, dlp.DeleteDlpJobRequest): - request = dlp.DeleteDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_dlp_job(self, - request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]): - The request object. The request message for canceling a - DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CancelDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CancelDlpJobRequest): - request = dlp.CancelDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_stored_info_type(self, - request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]): - The request object. Request message for - CreateStoredInfoType. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the - storedInfoType to create. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, config]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateStoredInfoTypeRequest): - request = dlp.CreateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_stored_info_type(self, - request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.update_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]): - The request object. Request message for - UpdateStoredInfoType. - name (str): - Required. Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, config, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): - request = dlp.UpdateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_stored_info_type(self, - request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]): - The request object. Request message for - GetStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetStoredInfoTypeRequest): - request = dlp.GetStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_stored_info_types(self, - request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListStoredInfoTypesPager: - r"""Lists stored infoTypes. 
- See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]): - The request object. Request message for - ListStoredInfoTypes. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager: - Response message for - ListStoredInfoTypes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListStoredInfoTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListStoredInfoTypesRequest): - request = dlp.ListStoredInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListStoredInfoTypesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_stored_info_type(self, - request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - client.delete_stored_info_type(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]): - The request object. Request message for - DeleteStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): - request = dlp.DeleteStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def hybrid_inspect_dlp_job(self, - request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.HybridInspectDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.HybridInspectDlpJobRequest): - request = dlp.HybridInspectDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def finish_dlp_job(self, - request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - client.finish_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.FinishDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.FinishDlpJobRequest): - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "DlpServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! 
Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DlpServiceClient", -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py deleted file mode 100644 index 73a0e48f..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py +++ /dev/null @@ -1,623 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dlp_v2.types import dlp - - -class ListInspectTemplatesPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``inspect_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListInspectTemplatesResponse], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.InspectTemplate]: - for page in self.pages: - yield from page.inspect_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInspectTemplatesAsyncPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``inspect_templates`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.InspectTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.inspect_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``deidentify_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDeidentifyTemplatesResponse], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.DeidentifyTemplate]: - for page in self.pages: - yield from page.deidentify_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesAsyncPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``deidentify_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.DeidentifyTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.deidentify_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersPager: - """A pager for iterating through ``list_job_triggers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_triggers`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListJobTriggersResponse], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.JobTrigger]: - for page in self.pages: - yield from page.job_triggers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersAsyncPager: - """A pager for iterating through ``list_job_triggers`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_triggers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.JobTrigger]: - async def async_generator(): - async for page in self.pages: - for response in page.job_triggers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDlpJobsResponse], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. 
- response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.DlpJob]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsAsyncPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.DlpJob]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``stored_info_types`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.ListStoredInfoTypesResponse], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.StoredInfoType]: - for page in self.pages: - yield from page.stored_info_types - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesAsyncPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``stored_info_types`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.StoredInfoType]: - async def async_generator(): - async for page in self.pages: - for response in page.stored_info_types: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py deleted file mode 100644 index df9b4279..00000000 --- 
a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DlpServiceTransport -from .grpc import DlpServiceGrpcTransport -from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .rest import DlpServiceRestTransport -from .rest import DlpServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] -_transport_registry['grpc'] = DlpServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport -_transport_registry['rest'] = DlpServiceRestTransport - -__all__ = ( - 'DlpServiceTransport', - 'DlpServiceGrpcTransport', - 'DlpServiceGrpcAsyncIOTransport', - 'DlpServiceRestTransport', - 'DlpServiceRestInterceptor', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py deleted file mode 100644 index e90545e1..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py +++ /dev/null @@ -1,752 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dlp_v2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DlpServiceTransport(abc.ABC): - """Abstract transport class for DlpService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dlp.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. 
- scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.inspect_content: gapic_v1.method.wrap_method( - self.inspect_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.redact_image: gapic_v1.method.wrap_method( - self.redact_image, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.deidentify_content: gapic_v1.method.wrap_method( - self.deidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.reidentify_content: gapic_v1.method.wrap_method( - self.reidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_info_types: gapic_v1.method.wrap_method( - self.list_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_inspect_template: gapic_v1.method.wrap_method( - self.create_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_inspect_template: gapic_v1.method.wrap_method( - 
self.update_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_inspect_template: gapic_v1.method.wrap_method( - self.get_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_inspect_templates: gapic_v1.method.wrap_method( - self.list_inspect_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_inspect_template: gapic_v1.method.wrap_method( - self.delete_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_deidentify_template: gapic_v1.method.wrap_method( - self.create_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_deidentify_template: gapic_v1.method.wrap_method( - self.update_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_deidentify_template: gapic_v1.method.wrap_method( - self.get_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_deidentify_templates: gapic_v1.method.wrap_method( - self.list_deidentify_templates, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_deidentify_template: gapic_v1.method.wrap_method( - self.delete_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_job_trigger: gapic_v1.method.wrap_method( - self.create_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.update_job_trigger: gapic_v1.method.wrap_method( - self.update_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( - self.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.get_job_trigger: gapic_v1.method.wrap_method( - self.get_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_job_triggers: gapic_v1.method.wrap_method( - self.list_job_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_job_trigger: gapic_v1.method.wrap_method( - self.delete_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - 
core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.activate_job_trigger: gapic_v1.method.wrap_method( - self.activate_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.create_dlp_job: gapic_v1.method.wrap_method( - self.create_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.list_dlp_jobs: gapic_v1.method.wrap_method( - self.list_dlp_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_dlp_job: gapic_v1.method.wrap_method( - self.get_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_dlp_job: gapic_v1.method.wrap_method( - self.delete_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.cancel_dlp_job: gapic_v1.method.wrap_method( - self.cancel_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.create_stored_info_type: gapic_v1.method.wrap_method( - self.create_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.update_stored_info_type: gapic_v1.method.wrap_method( - self.update_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.get_stored_info_type: gapic_v1.method.wrap_method( - self.get_stored_info_type, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_stored_info_types: gapic_v1.method.wrap_method( - self.list_stored_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_stored_info_type: gapic_v1.method.wrap_method( - self.delete_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( - self.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.finish_dlp_job: gapic_v1.method.wrap_method( - self.finish_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Union[ - dlp.InspectContentResponse, - Awaitable[dlp.InspectContentResponse] - ]]: - raise NotImplementedError() - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - Union[ - dlp.RedactImageResponse, - Awaitable[dlp.RedactImageResponse] - ]]: - raise NotImplementedError() - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - Union[ - dlp.DeidentifyContentResponse, - Awaitable[dlp.DeidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Union[ - dlp.ReidentifyContentResponse, - Awaitable[dlp.ReidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Union[ - dlp.ListInfoTypesResponse, - Awaitable[dlp.ListInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Union[ - dlp.ListInspectTemplatesResponse, - Awaitable[dlp.ListInspectTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Union[ - 
empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Union[ - dlp.ListDeidentifyTemplatesResponse, - Awaitable[dlp.ListDeidentifyTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def list_job_triggers(self) -> Callable[ - 
[dlp.ListJobTriggersRequest], - Union[ - dlp.ListJobTriggersResponse, - Awaitable[dlp.ListJobTriggersResponse] - ]]: - raise NotImplementedError() - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - Union[ - dlp.ListDlpJobsResponse, - Awaitable[dlp.ListDlpJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def 
list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Union[ - dlp.ListStoredInfoTypesResponse, - Awaitable[dlp.ListStoredInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DlpServiceTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py deleted file mode 100644 index d95be0ba..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py +++ /dev/null @@ -1,1262 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO - - -class DlpServiceGrpcTransport(DlpServiceTransport): - """gRPC backend transport for DlpService. - - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - dlp.InspectContentResponse]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. 
- For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - Returns: - Callable[[~.InspectContentRequest], - ~.InspectContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'inspect_content' not in self._stubs: - self._stubs['inspect_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/InspectContent', - request_serializer=dlp.InspectContentRequest.serialize, - response_deserializer=dlp.InspectContentResponse.deserialize, - ) - return self._stubs['inspect_content'] - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - dlp.RedactImageResponse]: - r"""Return a callable for the redact image method over gRPC. - - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.RedactImageRequest], - ~.RedactImageResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'redact_image' not in self._stubs: - self._stubs['redact_image'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/RedactImage', - request_serializer=dlp.RedactImageRequest.serialize, - response_deserializer=dlp.RedactImageResponse.deserialize, - ) - return self._stubs['redact_image'] - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - dlp.DeidentifyContentResponse]: - r"""Return a callable for the deidentify content method over gRPC. - - De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.DeidentifyContentRequest], - ~.DeidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - dlp.ReidentifyContentResponse]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. 
See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - ~.ReidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - dlp.ListInfoTypesResponse]: - r"""Return a callable for the list info types method over gRPC. - - Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - ~.ListInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.UpdateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - dlp.ListInspectTemplatesResponse]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.ListInspectTemplatesRequest], - ~.ListInspectTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.DeleteInspectTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. 
- - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - dlp.ListDeidentifyTemplatesResponse]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - ~.ListDeidentifyTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. 
- - Returns: - Callable[[~.CreateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.UpdateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. 
To review the findings monitor the jobs - within the trigger. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - dlp.ListJobTriggersResponse]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. 
- See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.ListJobTriggersRequest], - ~.ListJobTriggersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - dlp.DlpJob]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. 
Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - Returns: - Callable[[~.ActivateJobTriggerRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'activate_job_trigger' not in self._stubs: - self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', - request_serializer=dlp.ActivateJobTriggerRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['activate_job_trigger'] - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - dlp.DlpJob]: - r"""Return a callable for the create dlp job method over gRPC. - - Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.CreateDlpJobRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_dlp_job' not in self._stubs: - self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDlpJob', - request_serializer=dlp.CreateDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['create_dlp_job'] - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - dlp.ListDlpJobsResponse]: - r"""Return a callable for the list dlp jobs method over gRPC. - - Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.ListDlpJobsRequest], - ~.ListDlpJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_dlp_jobs' not in self._stubs: - self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDlpJobs', - request_serializer=dlp.ListDlpJobsRequest.serialize, - response_deserializer=dlp.ListDlpJobsResponse.deserialize, - ) - return self._stubs['list_dlp_jobs'] - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - dlp.DlpJob]: - r"""Return a callable for the get dlp job method over gRPC. - - Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.GetDlpJobRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete dlp job method over gRPC. - - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. 
- - Returns: - Callable[[~.CancelDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the update stored info type method over gRPC. - - Updates the stored infoType by creating a new - version. 
The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - dlp.ListStoredInfoTypesResponse]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.ListStoredInfoTypesRequest], - ~.ListStoredInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. - - Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - Returns: - Callable[[~.FinishDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DlpServiceGrpcTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py deleted file mode 100644 index 03c8bf3c..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1261 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DlpServiceGrpcTransport - - -class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): - """gRPC AsyncIO backend transport for DlpService. - - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Awaitable[dlp.InspectContentResponse]]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. 
- This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - Returns: - Callable[[~.InspectContentRequest], - Awaitable[~.InspectContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'inspect_content' not in self._stubs: - self._stubs['inspect_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/InspectContent', - request_serializer=dlp.InspectContentRequest.serialize, - response_deserializer=dlp.InspectContentResponse.deserialize, - ) - return self._stubs['inspect_content'] - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - Awaitable[dlp.RedactImageResponse]]: - r"""Return a callable for the redact image method over gRPC. - - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.RedactImageRequest], - Awaitable[~.RedactImageResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'redact_image' not in self._stubs: - self._stubs['redact_image'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/RedactImage', - request_serializer=dlp.RedactImageRequest.serialize, - response_deserializer=dlp.RedactImageResponse.deserialize, - ) - return self._stubs['redact_image'] - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - Awaitable[dlp.DeidentifyContentResponse]]: - r"""Return a callable for the deidentify content method over gRPC. - - De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.DeidentifyContentRequest], - Awaitable[~.DeidentifyContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Awaitable[dlp.ReidentifyContentResponse]]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - Awaitable[~.ReidentifyContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Awaitable[dlp.ListInfoTypesResponse]]: - r"""Return a callable for the list info types method over gRPC. - - Returns a list of the sensitive information types - that DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - Awaitable[~.ListInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/dlp/docs/creating-templates to - learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. 
- - Returns: - Callable[[~.UpdateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Awaitable[dlp.ListInspectTemplatesResponse]]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. 
- See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.ListInspectTemplatesRequest], - Awaitable[~.ListInspectTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.DeleteInspectTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. 
- - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Awaitable[dlp.ListDeidentifyTemplatesResponse]]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - Awaitable[~.ListDeidentifyTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. - See - https://cloud.google.com/dlp/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.CreateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. 
- - Returns: - Callable[[~.UpdateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - Awaitable[dlp.ListJobTriggersResponse]]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.ListJobTriggersRequest], - Awaitable[~.ListJobTriggersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - Returns: - Callable[[~.ActivateJobTriggerRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'activate_job_trigger' not in self._stubs: - self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', - request_serializer=dlp.ActivateJobTriggerRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['activate_job_trigger'] - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the create dlp job method over gRPC. - - Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.CreateDlpJobRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_dlp_job' not in self._stubs: - self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDlpJob', - request_serializer=dlp.CreateDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['create_dlp_job'] - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - Awaitable[dlp.ListDlpJobsResponse]]: - r"""Return a callable for the list dlp jobs method over gRPC. - - Lists DlpJobs that match the specified filter in the - request. 
See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.ListDlpJobsRequest], - Awaitable[~.ListDlpJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_dlp_jobs' not in self._stubs: - self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDlpJobs', - request_serializer=dlp.ListDlpJobsRequest.serialize, - response_deserializer=dlp.ListDlpJobsResponse.deserialize, - ) - return self._stubs['list_dlp_jobs'] - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the get dlp job method over gRPC. - - Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.GetDlpJobRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete dlp job method over gRPC. 
- - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.CancelDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the update stored info type method over gRPC. - - Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Awaitable[dlp.ListStoredInfoTypesResponse]]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. 
- - Returns: - Callable[[~.ListStoredInfoTypesRequest], - Awaitable[~.ListStoredInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See - https://cloud.google.com/dlp/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. 
- - Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - Returns: - Callable[[~.FinishDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'DlpServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py deleted file mode 100644 index 789b6267..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ /dev/null @@ -1,4325 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore - -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class DlpServiceRestInterceptor: - """Interceptor for DlpService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DlpServiceRestTransport. - - .. 
code-block:: python - class MyCustomDlpServiceInterceptor(DlpServiceRestInterceptor): - def pre_activate_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_activate_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_cancel_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_create_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_deidentify_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_dlp_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_inspect_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_stored_info_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_deidentify_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_deidentify_content(self, response): - logging.log(f"Received response: {response}") - return response - - def 
pre_delete_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_finish_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_deidentify_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_dlp_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_inspect_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_stored_info_type(self, response): - logging.log(f"Received response: {response}") - return 
response - - def pre_hybrid_inspect_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_hybrid_inspect_dlp_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_hybrid_inspect_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_hybrid_inspect_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_inspect_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_inspect_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_deidentify_templates(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_deidentify_templates(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_dlp_jobs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_dlp_jobs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_info_types(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_info_types(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_inspect_templates(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_inspect_templates(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_job_triggers(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_job_triggers(self, response): - logging.log(f"Received response: {response}") - return response - - def 
pre_list_stored_info_types(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_stored_info_types(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_redact_image(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_redact_image(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_reidentify_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_reidentify_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_deidentify_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_inspect_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_stored_info_type(self, response): - logging.log(f"Received response: {response}") - return response - - transport = DlpServiceRestTransport(interceptor=MyCustomDlpServiceInterceptor()) - client = DlpServiceClient(transport=transport) - - - """ - def pre_activate_job_trigger(self, request: dlp.ActivateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for activate_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: - """Post-rpc interceptor for activate_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_cancel_dlp_job(self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_create_deidentify_template(self, request: dlp.CreateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: - """Post-rpc interceptor for create_deidentify_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_create_dlp_job(self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: - """Post-rpc interceptor for create_dlp_job - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_create_inspect_template(self, request: dlp.CreateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: - """Post-rpc interceptor for create_inspect_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_create_job_trigger(self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: - """Post-rpc interceptor for create_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. 
- """ - return response - def pre_create_stored_info_type(self, request: dlp.CreateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: - """Post-rpc interceptor for create_stored_info_type - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_deidentify_content(self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for deidentify_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_deidentify_content(self, response: dlp.DeidentifyContentResponse) -> dlp.DeidentifyContentResponse: - """Post-rpc interceptor for deidentify_content - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_delete_deidentify_template(self, request: dlp.DeleteDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def pre_delete_dlp_job(self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_inspect_template(self, request: dlp.DeleteInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_job_trigger(self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_stored_info_type(self, request: dlp.DeleteStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_finish_dlp_job(self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for finish_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def pre_get_deidentify_template(self, request: dlp.GetDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: - """Post-rpc interceptor for get_deidentify_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_get_dlp_job(self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: - """Post-rpc interceptor for get_dlp_job - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_get_inspect_template(self, request: dlp.GetInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_get_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: - """Post-rpc interceptor for get_inspect_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_get_job_trigger(self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: - """Post-rpc interceptor for get_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_get_stored_info_type(self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: - """Post-rpc interceptor for get_stored_info_type - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. 
- """ - return response - def pre_hybrid_inspect_dlp_job(self, request: dlp.HybridInspectDlpJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for hybrid_inspect_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_hybrid_inspect_dlp_job(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: - """Post-rpc interceptor for hybrid_inspect_dlp_job - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_hybrid_inspect_job_trigger(self, request: dlp.HybridInspectJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for hybrid_inspect_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_hybrid_inspect_job_trigger(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: - """Post-rpc interceptor for hybrid_inspect_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_inspect_content(self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for inspect_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_inspect_content(self, response: dlp.InspectContentResponse) -> dlp.InspectContentResponse: - """Post-rpc interceptor for inspect_content - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_deidentify_templates(self, request: dlp.ListDeidentifyTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_deidentify_templates - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_deidentify_templates(self, response: dlp.ListDeidentifyTemplatesResponse) -> dlp.ListDeidentifyTemplatesResponse: - """Post-rpc interceptor for list_deidentify_templates - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_dlp_jobs(self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_dlp_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_dlp_jobs(self, response: dlp.ListDlpJobsResponse) -> dlp.ListDlpJobsResponse: - """Post-rpc interceptor for list_dlp_jobs - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. 
- """ - return response - def pre_list_info_types(self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_info_types - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_info_types(self, response: dlp.ListInfoTypesResponse) -> dlp.ListInfoTypesResponse: - """Post-rpc interceptor for list_info_types - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_inspect_templates(self, request: dlp.ListInspectTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_inspect_templates - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_inspect_templates(self, response: dlp.ListInspectTemplatesResponse) -> dlp.ListInspectTemplatesResponse: - """Post-rpc interceptor for list_inspect_templates - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_job_triggers(self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_job_triggers - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_list_job_triggers(self, response: dlp.ListJobTriggersResponse) -> dlp.ListJobTriggersResponse: - """Post-rpc interceptor for list_job_triggers - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_list_stored_info_types(self, request: dlp.ListStoredInfoTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_stored_info_types - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_stored_info_types(self, response: dlp.ListStoredInfoTypesResponse) -> dlp.ListStoredInfoTypesResponse: - """Post-rpc interceptor for list_stored_info_types - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_redact_image(self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for redact_image - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_redact_image(self, response: dlp.RedactImageResponse) -> dlp.RedactImageResponse: - """Post-rpc interceptor for redact_image - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. 
- """ - return response - def pre_reidentify_content(self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for reidentify_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_reidentify_content(self, response: dlp.ReidentifyContentResponse) -> dlp.ReidentifyContentResponse: - """Post-rpc interceptor for reidentify_content - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_update_deidentify_template(self, request: dlp.UpdateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: - """Post-rpc interceptor for update_deidentify_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_update_inspect_template(self, request: dlp.UpdateInspectTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_update_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: - """Post-rpc interceptor for update_inspect_template - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_update_job_trigger(self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: - """Post-rpc interceptor for update_job_trigger - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - def pre_update_stored_info_type(self, request: dlp.UpdateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: - """Post-rpc interceptor for update_stored_info_type - - Override in a subclass to manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class DlpServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DlpServiceRestInterceptor - - -class DlpServiceRestTransport(DlpServiceTransport): - """REST backend transport for DlpService. 
- - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in - user-supplied, unstructured data streams, like text blocks or - images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DlpServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
- certificate to configure mutual TLS HTTP channel. It is ignored
- if ``channel`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you are developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- url_scheme: the protocol scheme for the API endpoint. Normally
- "https", but for testing or local servers,
- "http" can be specified.
- """
- # Run the base constructor
- # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
- # credentials object
- maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
- super().__init__(
- host=host,
- credentials=credentials,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- api_audience=api_audience
- )
- self._session = AuthorizedSession(
- self._credentials, default_host=self.DEFAULT_HOST)
- if client_cert_source_for_mtls:
- self._session.configure_mtls_channel(client_cert_source_for_mtls)
- self._interceptor = interceptor or DlpServiceRestInterceptor()
- self._prep_wrapped_messages(client_info)
-
- class _ActivateJobTrigger(DlpServiceRestStub):
- def __hash__(self):
- return hash("ActivateJobTrigger")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
- }
-
- @classmethod
- def _get_unset_required_fields(cls,
message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ActivateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DlpJob: - r"""Call the activate job trigger method over HTTP. - - Args: - request (~.dlp.ActivateJobTriggerRequest): - The request object. Request message for - ActivateJobTrigger. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DlpJob: - Combines all of the information about - a DLP job. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/jobTriggers/*}:activate', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:activate', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_activate_job_trigger(request, metadata) - pb_request = dlp.ActivateJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = 
getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DlpJob() - pb_resp = dlp.DlpJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_activate_job_trigger(resp) - return resp - - class _CancelDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("CancelDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CancelDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the cancel dlp job method over HTTP. - - Args: - request (~.dlp.CancelDlpJobRequest): - The request object. The request message for canceling a - DLP job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/dlpJobs/*}:cancel', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_cancel_dlp_job(request, metadata) - pb_request = dlp.CancelDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _CreateDeidentifyTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("CreateDeidentifyTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyTemplate: - r"""Call the create deidentify - template method over HTTP. - - Args: - request (~.dlp.CreateDeidentifyTemplateRequest): - The request object. Request message for - CreateDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_deidentify_template(request, metadata) - pb_request = dlp.CreateDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyTemplate() - pb_resp = dlp.DeidentifyTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_deidentify_template(resp) - return resp - - class _CreateDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("CreateDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DlpJob: - r"""Call the create dlp job method over HTTP. - - Args: - request (~.dlp.CreateDlpJobRequest): - The request object. Request message for - CreateDlpJobRequest. Used to initiate - long running jobs such as calculating - risk metrics or inspecting Google Cloud - Storage. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DlpJob: - Combines all of the information about - a DLP job. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/dlpJobs', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_dlp_job(request, metadata) - pb_request = dlp.CreateDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DlpJob() - pb_resp = dlp.DlpJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_dlp_job(resp) - return resp - - class _CreateInspectTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("CreateInspectTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectTemplate: - r"""Call the create inspect template method over HTTP. - - Args: - request (~.dlp.CreateInspectTemplateRequest): - The request object. Request message for - CreateInspectTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/inspectTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/inspectTemplates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_inspect_template(request, metadata) - pb_request = dlp.CreateInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectTemplate() - pb_resp = dlp.InspectTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_inspect_template(resp) - return resp - - class _CreateJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("CreateJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.JobTrigger: - r"""Call the create job trigger method over HTTP. - - Args: - request (~.dlp.CreateJobTriggerRequest): - The request object. Request message for CreateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/jobTriggers', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_job_trigger(request, metadata) - pb_request = dlp.CreateJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.JobTrigger() - pb_resp = dlp.JobTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_job_trigger(resp) - return resp - - class _CreateStoredInfoType(DlpServiceRestStub): - def __hash__(self): - return hash("CreateStoredInfoType") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.CreateStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.StoredInfoType: - r"""Call the create stored info type method over HTTP. - - Args: - request (~.dlp.CreateStoredInfoTypeRequest): - The request object. Request message for - CreateStoredInfoType. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/storedInfoTypes', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_stored_info_type(request, metadata) - pb_request = dlp.CreateStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.StoredInfoType() - pb_resp = dlp.StoredInfoType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_stored_info_type(resp) - return resp - - class _DeidentifyContent(DlpServiceRestStub): - def __hash__(self): - return hash("DeidentifyContent") - - def __call__(self, - request: dlp.DeidentifyContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyContentResponse: - r"""Call the deidentify content method over HTTP. - - Args: - request (~.dlp.DeidentifyContentRequest): - The request object. Request to de-identify a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:deidentify', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:deidentify', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_deidentify_content(request, metadata) - pb_request = dlp.DeidentifyContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyContentResponse() - pb_resp = dlp.DeidentifyContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_deidentify_content(resp) - return resp - - class _DeleteDeidentifyTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteDeidentifyTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete deidentify - template method over HTTP. - - Args: - request (~.dlp.DeleteDeidentifyTemplateRequest): - The request object. Request message for - DeleteDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_deidentify_template(request, metadata) - pb_request = dlp.DeleteDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete dlp job method over HTTP. - - Args: - request (~.dlp.DeleteDlpJobRequest): - The request object. The request message for deleting a - DLP job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/dlpJobs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_dlp_job(request, metadata) - pb_request = dlp.DeleteDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteInspectTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteInspectTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete inspect template method over HTTP. - - Args: - request (~.dlp.DeleteInspectTemplateRequest): - The request object. 
Request message for - DeleteInspectTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_inspect_template(request, metadata) - pb_request = dlp.DeleteInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete job trigger method over HTTP. - - Args: - request (~.dlp.DeleteJobTriggerRequest): - The request object. Request message for DeleteJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_job_trigger(request, metadata) - pb_request = dlp.DeleteJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteStoredInfoType(DlpServiceRestStub): - def __hash__(self): - return hash("DeleteStoredInfoType") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.DeleteStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete stored info type method over HTTP. - - Args: - request (~.dlp.DeleteStoredInfoTypeRequest): - The request object. Request message for - DeleteStoredInfoType. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_stored_info_type(request, metadata) - pb_request = dlp.DeleteStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _FinishDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("FinishDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.FinishDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the finish dlp job method over HTTP. - - Args: - request (~.dlp.FinishDlpJobRequest): - The request object. The request message for finishing a - DLP hybrid job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:finish', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_finish_dlp_job(request, metadata) - pb_request = dlp.FinishDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetDeidentifyTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("GetDeidentifyTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyTemplate: - r"""Call the get deidentify template method over HTTP. - - Args: - request (~.dlp.GetDeidentifyTemplateRequest): - The request object. Request message for - GetDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_get_deidentify_template(request, metadata) - pb_request = dlp.GetDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyTemplate() - pb_resp = dlp.DeidentifyTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_deidentify_template(resp) - return resp - - class _GetDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("GetDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DlpJob: - r"""Call the get dlp job method over HTTP. - - Args: - request (~.dlp.GetDlpJobRequest): - The request object. The request message for [DlpJobs.GetDlpJob][]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DlpJob: - Combines all of the information about - a DLP job. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/dlpJobs/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', - }, - ] - request, metadata = self._interceptor.pre_get_dlp_job(request, metadata) - pb_request = dlp.GetDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DlpJob() - pb_resp = dlp.DlpJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_dlp_job(resp) - return resp - - class _GetInspectTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("GetInspectTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectTemplate: - r"""Call the get inspect template method over HTTP. - - Args: - request (~.dlp.GetInspectTemplateRequest): - The request object. Request message for - GetInspectTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_get_inspect_template(request, metadata) - pb_request = dlp.GetInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectTemplate() - pb_resp = dlp.InspectTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_inspect_template(resp) - return resp - - class _GetJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("GetJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.JobTrigger: - r"""Call the get job trigger method over HTTP. - - Args: - request (~.dlp.GetJobTriggerRequest): - The request object. Request message for GetJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - }, - ] - request, metadata = self._interceptor.pre_get_job_trigger(request, metadata) - pb_request = dlp.GetJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.JobTrigger() - pb_resp = dlp.JobTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_job_trigger(resp) - return resp - - class _GetStoredInfoType(DlpServiceRestStub): - def __hash__(self): - return hash("GetStoredInfoType") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.GetStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.StoredInfoType: - r"""Call the get stored info type method over HTTP. - - Args: - request (~.dlp.GetStoredInfoTypeRequest): - The request object. Request message for - GetStoredInfoType. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - }, - ] - request, metadata = self._interceptor.pre_get_stored_info_type(request, metadata) - pb_request = dlp.GetStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.StoredInfoType() - pb_resp = dlp.StoredInfoType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_stored_info_type(resp) - return resp - - class _HybridInspectDlpJob(DlpServiceRestStub): - def __hash__(self): - return hash("HybridInspectDlpJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.HybridInspectDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.HybridInspectResponse: - r"""Call the hybrid inspect dlp job method over HTTP. - - Args: - request (~.dlp.HybridInspectDlpJobRequest): - The request object. Request to search for potentially - sensitive info in a custom location. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job(request, metadata) - pb_request = dlp.HybridInspectDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.HybridInspectResponse() - pb_resp = dlp.HybridInspectResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_hybrid_inspect_dlp_job(resp) - return resp - - class _HybridInspectJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("HybridInspectJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.HybridInspectJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.HybridInspectResponse: - r"""Call the hybrid inspect job - trigger method over HTTP. - - Args: - request (~.dlp.HybridInspectJobTriggerRequest): - The request object. Request to search for potentially - sensitive info in a custom location. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger(request, metadata) - pb_request = dlp.HybridInspectJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.HybridInspectResponse() - pb_resp = dlp.HybridInspectResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_hybrid_inspect_job_trigger(resp) - return resp - - class _InspectContent(DlpServiceRestStub): - def __hash__(self): - return hash("InspectContent") - - def __call__(self, - request: dlp.InspectContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectContentResponse: - r"""Call the inspect content method over HTTP. - - Args: - request (~.dlp.InspectContentRequest): - The request object. Request to search for potentially - sensitive info in a ContentItem. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.InspectContentResponse: - Results of inspecting an item. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:inspect', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:inspect', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_inspect_content(request, metadata) - pb_request = dlp.InspectContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectContentResponse() - pb_resp = dlp.InspectContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_inspect_content(resp) - return resp - - class _ListDeidentifyTemplates(DlpServiceRestStub): - def __hash__(self): - return hash("ListDeidentifyTemplates") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListDeidentifyTemplatesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListDeidentifyTemplatesResponse: - r"""Call the list deidentify templates method over HTTP. - - Args: - request (~.dlp.ListDeidentifyTemplatesRequest): - The request object. Request message for - ListDeidentifyTemplates. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListDeidentifyTemplatesResponse: - Response message for - ListDeidentifyTemplates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', - }, - ] - request, metadata = self._interceptor.pre_list_deidentify_templates(request, metadata) - pb_request = dlp.ListDeidentifyTemplatesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListDeidentifyTemplatesResponse() - pb_resp = dlp.ListDeidentifyTemplatesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_deidentify_templates(resp) - return resp - - class _ListDlpJobs(DlpServiceRestStub): - def __hash__(self): - return hash("ListDlpJobs") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListDlpJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListDlpJobsResponse: - r"""Call the list dlp jobs method over HTTP. - - Args: - request (~.dlp.ListDlpJobsRequest): - The request object. The request message for listing DLP - jobs. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListDlpJobsResponse: - The response message for listing DLP - jobs. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/dlpJobs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/dlpJobs', - }, - ] - request, metadata = self._interceptor.pre_list_dlp_jobs(request, metadata) - pb_request = dlp.ListDlpJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListDlpJobsResponse() - pb_resp = dlp.ListDlpJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_dlp_jobs(resp) - return resp - - class _ListInfoTypes(DlpServiceRestStub): - def __hash__(self): - return hash("ListInfoTypes") - - def __call__(self, - request: dlp.ListInfoTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListInfoTypesResponse: - r"""Call the list info types method over HTTP. - - Args: - request (~.dlp.ListInfoTypesRequest): - The request object. Request for the list of infoTypes. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListInfoTypesResponse: - Response to the ListInfoTypes - request. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/infoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=locations/*}/infoTypes', - }, - ] - request, metadata = self._interceptor.pre_list_info_types(request, metadata) - pb_request = dlp.ListInfoTypesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListInfoTypesResponse() - pb_resp = dlp.ListInfoTypesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_info_types(resp) - return resp - - class _ListInspectTemplates(DlpServiceRestStub): - def __hash__(self): - return hash("ListInspectTemplates") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListInspectTemplatesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListInspectTemplatesResponse: - r"""Call the list inspect templates method over HTTP. - - Args: - request (~.dlp.ListInspectTemplatesRequest): - The request object. Request message for - ListInspectTemplates. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListInspectTemplatesResponse: - Response message for - ListInspectTemplates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/inspectTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/inspectTemplates', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', - }, - ] - request, metadata = self._interceptor.pre_list_inspect_templates(request, metadata) - pb_request = dlp.ListInspectTemplatesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListInspectTemplatesResponse() - pb_resp = dlp.ListInspectTemplatesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_inspect_templates(resp) - return resp - - class _ListJobTriggers(DlpServiceRestStub): - def __hash__(self): - return hash("ListJobTriggers") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListJobTriggersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListJobTriggersResponse: - r"""Call the list job triggers method over HTTP. - - Args: - request (~.dlp.ListJobTriggersRequest): - The request object. Request message for ListJobTriggers. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListJobTriggersResponse: - Response message for ListJobTriggers. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/jobTriggers', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', - }, - ] - request, metadata = self._interceptor.pre_list_job_triggers(request, metadata) - pb_request = dlp.ListJobTriggersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListJobTriggersResponse() - pb_resp = dlp.ListJobTriggersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_job_triggers(resp) - return resp - - class _ListStoredInfoTypes(DlpServiceRestStub): - def __hash__(self): - return hash("ListStoredInfoTypes") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ListStoredInfoTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ListStoredInfoTypesResponse: - r"""Call the list stored info types method over HTTP. - - Args: - request (~.dlp.ListStoredInfoTypesRequest): - The request object. Request message for - ListStoredInfoTypes. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ListStoredInfoTypesResponse: - Response message for - ListStoredInfoTypes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/storedInfoTypes', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', - }, - ] - request, metadata = self._interceptor.pre_list_stored_info_types(request, metadata) - pb_request = dlp.ListStoredInfoTypesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListStoredInfoTypesResponse() - pb_resp = dlp.ListStoredInfoTypesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_stored_info_types(resp) - return resp - - class _RedactImage(DlpServiceRestStub): - def __hash__(self): - return hash("RedactImage") - - def __call__(self, - request: dlp.RedactImageRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.RedactImageResponse: - r"""Call the redact image method over HTTP. - - Args: - request (~.dlp.RedactImageRequest): - The request object. Request to search for potentially - sensitive info in an image and redact it - by covering it with a colored rectangle. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.RedactImageResponse: - Results of redacting an image. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/image:redact', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/image:redact', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_redact_image(request, metadata) - pb_request = dlp.RedactImageRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.RedactImageResponse() - pb_resp = dlp.RedactImageResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_redact_image(resp) - return resp - - class _ReidentifyContent(DlpServiceRestStub): - def __hash__(self): - return hash("ReidentifyContent") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.ReidentifyContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.ReidentifyContentResponse: - r"""Call the reidentify content method over HTTP. - - Args: - request (~.dlp.ReidentifyContentRequest): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.ReidentifyContentResponse: - Results of re-identifying an item. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:reidentify', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:reidentify', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_reidentify_content(request, metadata) - pb_request = dlp.ReidentifyContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ReidentifyContentResponse() - pb_resp = dlp.ReidentifyContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_reidentify_content(resp) - return resp - - class _UpdateDeidentifyTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("UpdateDeidentifyTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.DeidentifyTemplate: - r"""Call the update deidentify - template method over HTTP. - - Args: - request (~.dlp.UpdateDeidentifyTemplateRequest): - The request object. Request message for - UpdateDeidentifyTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_update_deidentify_template(request, metadata) - pb_request = dlp.UpdateDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyTemplate() - pb_resp = dlp.DeidentifyTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_deidentify_template(resp) - return resp - - class _UpdateInspectTemplate(DlpServiceRestStub): - def __hash__(self): - return hash("UpdateInspectTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.InspectTemplate: - r"""Call the update inspect template method over HTTP. - - Args: - request (~.dlp.UpdateInspectTemplateRequest): - The request object. Request message for - UpdateInspectTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_update_inspect_template(request, metadata) - pb_request = dlp.UpdateInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectTemplate() - pb_resp = dlp.InspectTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_inspect_template(resp) - return resp - - class _UpdateJobTrigger(DlpServiceRestStub): - def __hash__(self): - return hash("UpdateJobTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.JobTrigger: - r"""Call the update job trigger method over HTTP. - - Args: - request (~.dlp.UpdateJobTriggerRequest): - The request object. Request message for UpdateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_update_job_trigger(request, metadata) - pb_request = dlp.UpdateJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.JobTrigger() - pb_resp = dlp.JobTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_job_trigger(resp) - return resp - - class _UpdateStoredInfoType(DlpServiceRestStub): - def __hash__(self): - return hash("UpdateStoredInfoType") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: dlp.UpdateStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> dlp.StoredInfoType: - r"""Call the update stored info type method over HTTP. - - Args: - request (~.dlp.UpdateStoredInfoTypeRequest): - The request object. Request message for - UpdateStoredInfoType. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.dlp.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - 'body': '*', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_update_stored_info_type(request, metadata) - pb_request = dlp.UpdateStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.StoredInfoType() - pb_resp = dlp.StoredInfoType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_stored_info_type(resp) - return resp - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - dlp.DlpJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - dlp.DlpJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - dlp.InspectTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - dlp.JobTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - dlp.StoredInfoType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - dlp.DeidentifyContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - dlp.DlpJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - dlp.InspectTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - dlp.JobTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - dlp.StoredInfoType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - dlp.HybridInspectResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - dlp.HybridInspectResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - dlp.InspectContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - dlp.ListDeidentifyTemplatesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - dlp.ListDlpJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - dlp.ListInfoTypesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - dlp.ListInspectTemplatesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - dlp.ListJobTriggersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - dlp.ListStoredInfoTypesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - dlp.RedactImageResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - dlp.ReidentifyContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - dlp.InspectTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - dlp.JobTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - dlp.StoredInfoType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DlpServiceRestTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py deleted file mode 100644 index 5bc3d949..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py +++ /dev/null @@ -1,390 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .dlp import ( - Action, - ActivateJobTriggerRequest, - AnalyzeDataSourceRiskDetails, - BoundingBox, - BucketingConfig, - ByteContentItem, - CancelDlpJobRequest, - CharacterMaskConfig, - CharsToIgnore, - Color, - Container, - ContentItem, - ContentLocation, - CreateDeidentifyTemplateRequest, - CreateDlpJobRequest, - CreateInspectTemplateRequest, - CreateJobTriggerRequest, - CreateStoredInfoTypeRequest, - CryptoDeterministicConfig, - CryptoHashConfig, - CryptoKey, - CryptoReplaceFfxFpeConfig, - DataProfileAction, - DataProfileConfigSnapshot, - DataProfileJobConfig, - DataProfileLocation, - DataProfilePubSubCondition, - DataProfilePubSubMessage, - DataRiskLevel, - DateShiftConfig, - DateTime, - DeidentifyConfig, - DeidentifyContentRequest, - DeidentifyContentResponse, - DeidentifyTemplate, - DeleteDeidentifyTemplateRequest, - DeleteDlpJobRequest, - DeleteInspectTemplateRequest, - DeleteJobTriggerRequest, - DeleteStoredInfoTypeRequest, - DlpJob, - DocumentLocation, - Error, - ExcludeByHotword, - ExcludeInfoTypes, - ExclusionRule, - FieldTransformation, - Finding, - FinishDlpJobRequest, - FixedSizeBucketingConfig, - GetDeidentifyTemplateRequest, - GetDlpJobRequest, - GetInspectTemplateRequest, - GetJobTriggerRequest, - GetStoredInfoTypeRequest, - HybridContentItem, - HybridFindingDetails, - HybridInspectDlpJobRequest, - HybridInspectJobTriggerRequest, - HybridInspectResponse, - HybridInspectStatistics, - ImageLocation, - ImageTransformations, - InfoTypeCategory, - InfoTypeDescription, - InfoTypeStats, - InfoTypeSummary, - InfoTypeTransformations, - InspectConfig, - InspectContentRequest, - InspectContentResponse, - InspectDataSourceDetails, - InspectionRule, - InspectionRuleSet, - InspectJobConfig, - InspectResult, - InspectTemplate, - JobTrigger, - KmsWrappedCryptoKey, - LargeCustomDictionaryConfig, - LargeCustomDictionaryStats, - ListDeidentifyTemplatesRequest, - ListDeidentifyTemplatesResponse, - ListDlpJobsRequest, - ListDlpJobsResponse, - 
ListInfoTypesRequest, - ListInfoTypesResponse, - ListInspectTemplatesRequest, - ListInspectTemplatesResponse, - ListJobTriggersRequest, - ListJobTriggersResponse, - ListStoredInfoTypesRequest, - ListStoredInfoTypesResponse, - Location, - Manual, - MetadataLocation, - OtherInfoTypeSummary, - OutputStorageConfig, - PrimitiveTransformation, - PrivacyMetric, - ProfileStatus, - QuasiId, - QuoteInfo, - Range, - RecordCondition, - RecordLocation, - RecordSuppression, - RecordTransformation, - RecordTransformations, - RedactConfig, - RedactImageRequest, - RedactImageResponse, - ReidentifyContentRequest, - ReidentifyContentResponse, - ReplaceDictionaryConfig, - ReplaceValueConfig, - ReplaceWithInfoTypeConfig, - RiskAnalysisJobConfig, - Schedule, - StatisticalTable, - StorageMetadataLabel, - StoredInfoType, - StoredInfoTypeConfig, - StoredInfoTypeStats, - StoredInfoTypeVersion, - Table, - TableDataProfile, - TableLocation, - TimePartConfig, - TransformationConfig, - TransformationDescription, - TransformationDetails, - TransformationDetailsStorageConfig, - TransformationErrorHandling, - TransformationLocation, - TransformationOverview, - TransformationResultStatus, - TransformationSummary, - TransientCryptoKey, - UnwrappedCryptoKey, - UpdateDeidentifyTemplateRequest, - UpdateInspectTemplateRequest, - UpdateJobTriggerRequest, - UpdateStoredInfoTypeRequest, - Value, - ValueFrequency, - VersionDescription, - ContentOption, - DlpJobType, - EncryptionStatus, - InfoTypeSupportedBy, - MatchingType, - MetadataType, - RelationalOperator, - ResourceVisibility, - StoredInfoTypeState, - TransformationContainerType, - TransformationResultStatusType, - TransformationType, -) -from .storage import ( - BigQueryField, - BigQueryKey, - BigQueryOptions, - BigQueryTable, - CloudStorageFileSet, - CloudStorageOptions, - CloudStoragePath, - CloudStorageRegexFileSet, - CustomInfoType, - DatastoreKey, - DatastoreOptions, - EntityId, - FieldId, - HybridOptions, - InfoType, - Key, - KindExpression, - 
PartitionId, - RecordKey, - SensitivityScore, - StorageConfig, - StoredType, - TableOptions, - FileType, - Likelihood, -) - -__all__ = ( - 'Action', - 'ActivateJobTriggerRequest', - 'AnalyzeDataSourceRiskDetails', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'Color', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateDeidentifyTemplateRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DataProfileAction', - 'DataProfileConfigSnapshot', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - 'DataRiskLevel', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyTemplate', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDlpJobRequest', - 'DeleteInspectTemplateRequest', - 'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeByHotword', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetDeidentifyTemplateRequest', - 'GetDlpJobRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetStoredInfoTypeRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'ImageTransformations', - 'InfoTypeCategory', - 'InfoTypeDescription', - 'InfoTypeStats', - 'InfoTypeSummary', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 
'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OtherInfoTypeSummary', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'ProfileStatus', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformation', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'ReplaceDictionaryConfig', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableDataProfile', - 'TableLocation', - 'TimePartConfig', - 'TransformationConfig', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationDetailsStorageConfig', - 'TransformationErrorHandling', - 'TransformationLocation', - 'TransformationOverview', - 'TransformationResultStatus', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateDeidentifyTemplateRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 'ValueFrequency', - 'VersionDescription', - 'ContentOption', - 'DlpJobType', - 'EncryptionStatus', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'RelationalOperator', - 'ResourceVisibility', - 'StoredInfoTypeState', 
- 'TransformationContainerType', - 'TransformationResultStatusType', - 'TransformationType', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'SensitivityScore', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py deleted file mode 100644 index 926b57bc..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py +++ /dev/null @@ -1,8848 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dlp_v2.types import storage -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'TransformationResultStatusType', - 'TransformationContainerType', - 'TransformationType', - 'RelationalOperator', - 'MatchingType', - 'ContentOption', - 'MetadataType', - 'InfoTypeSupportedBy', - 'DlpJobType', - 'StoredInfoTypeState', - 'ResourceVisibility', - 'EncryptionStatus', - 'ExcludeInfoTypes', - 'ExcludeByHotword', - 'ExclusionRule', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectConfig', - 'ByteContentItem', - 'ContentItem', - 'Table', - 'InspectResult', - 'Finding', - 'Location', - 'ContentLocation', - 'MetadataLocation', - 'StorageMetadataLabel', - 'DocumentLocation', - 'RecordLocation', - 'TableLocation', - 'Container', - 'Range', - 'ImageLocation', - 'BoundingBox', - 'RedactImageRequest', - 'Color', - 'RedactImageResponse', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'InspectContentRequest', - 'InspectContentResponse', - 'OutputStorageConfig', - 'InfoTypeStats', - 'InspectDataSourceDetails', - 'HybridInspectStatistics', - 'InfoTypeDescription', - 'InfoTypeCategory', - 'VersionDescription', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'RiskAnalysisJobConfig', - 'QuasiId', - 'StatisticalTable', - 'PrivacyMetric', - 'AnalyzeDataSourceRiskDetails', - 'ValueFrequency', - 'Value', - 
'QuoteInfo', - 'DateTime', - 'DeidentifyConfig', - 'ImageTransformations', - 'TransformationErrorHandling', - 'PrimitiveTransformation', - 'TimePartConfig', - 'CryptoHashConfig', - 'CryptoDeterministicConfig', - 'ReplaceValueConfig', - 'ReplaceDictionaryConfig', - 'ReplaceWithInfoTypeConfig', - 'RedactConfig', - 'CharsToIgnore', - 'CharacterMaskConfig', - 'FixedSizeBucketingConfig', - 'BucketingConfig', - 'CryptoReplaceFfxFpeConfig', - 'CryptoKey', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'KmsWrappedCryptoKey', - 'DateShiftConfig', - 'InfoTypeTransformations', - 'FieldTransformation', - 'RecordTransformations', - 'RecordSuppression', - 'RecordCondition', - 'TransformationOverview', - 'TransformationSummary', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationLocation', - 'RecordTransformation', - 'TransformationResultStatus', - 'TransformationDetailsStorageConfig', - 'Schedule', - 'Manual', - 'InspectTemplate', - 'DeidentifyTemplate', - 'Error', - 'JobTrigger', - 'Action', - 'TransformationConfig', - 'CreateInspectTemplateRequest', - 'UpdateInspectTemplateRequest', - 'GetInspectTemplateRequest', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'DeleteInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'ActivateJobTriggerRequest', - 'UpdateJobTriggerRequest', - 'GetJobTriggerRequest', - 'CreateDlpJobRequest', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'DeleteJobTriggerRequest', - 'InspectJobConfig', - 'DataProfileAction', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DlpJob', - 'GetDlpJobRequest', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'CancelDlpJobRequest', - 'FinishDlpJobRequest', - 'DeleteDlpJobRequest', - 'CreateDeidentifyTemplateRequest', - 'UpdateDeidentifyTemplateRequest', - 'GetDeidentifyTemplateRequest', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'DeleteDeidentifyTemplateRequest', - 'LargeCustomDictionaryConfig', - 
'LargeCustomDictionaryStats', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'StoredInfoType', - 'CreateStoredInfoTypeRequest', - 'UpdateStoredInfoTypeRequest', - 'GetStoredInfoTypeRequest', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'DeleteStoredInfoTypeRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectDlpJobRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectResponse', - 'DataRiskLevel', - 'DataProfileConfigSnapshot', - 'TableDataProfile', - 'ProfileStatus', - 'InfoTypeSummary', - 'OtherInfoTypeSummary', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - }, -) - - -class TransformationResultStatusType(proto.Enum): - r"""Enum of possible outcomes of transformations. SUCCESS if - transformation and storing of transformation was successful, - otherwise, reason for not transforming. - - Values: - STATE_TYPE_UNSPECIFIED (0): - No description available. - INVALID_TRANSFORM (1): - This will be set when a finding could not be - transformed (i.e. outside user set bucket - range). - BIGQUERY_MAX_ROW_SIZE_EXCEEDED (2): - This will be set when a BigQuery - transformation was successful but could not be - stored back in BigQuery because the transformed - row exceeds BigQuery's max row size. - METADATA_UNRETRIEVABLE (3): - This will be set when there is a finding in - the custom metadata of a file, but at the write - time of the transformed file, this key / value - pair is unretrievable. - SUCCESS (4): - This will be set when the transformation and - storing of it is successful. - """ - STATE_TYPE_UNSPECIFIED = 0 - INVALID_TRANSFORM = 1 - BIGQUERY_MAX_ROW_SIZE_EXCEEDED = 2 - METADATA_UNRETRIEVABLE = 3 - SUCCESS = 4 - - -class TransformationContainerType(proto.Enum): - r"""Describes functionality of a given container in its original - format. - - Values: - TRANSFORM_UNKNOWN_CONTAINER (0): - No description available. - TRANSFORM_BODY (1): - No description available. 
- TRANSFORM_METADATA (2): - No description available. - TRANSFORM_TABLE (3): - No description available. - """ - TRANSFORM_UNKNOWN_CONTAINER = 0 - TRANSFORM_BODY = 1 - TRANSFORM_METADATA = 2 - TRANSFORM_TABLE = 3 - - -class TransformationType(proto.Enum): - r"""An enum of rules that can be used to transform a value. Can be a - record suppression, or one of the transformation rules specified - under ``PrimitiveTransformation``. - - Values: - TRANSFORMATION_TYPE_UNSPECIFIED (0): - Unused - RECORD_SUPPRESSION (1): - Record suppression - REPLACE_VALUE (2): - Replace value - REPLACE_DICTIONARY (15): - Replace value using a dictionary. - REDACT (3): - Redact - CHARACTER_MASK (4): - Character mask - CRYPTO_REPLACE_FFX_FPE (5): - FFX-FPE - FIXED_SIZE_BUCKETING (6): - Fixed size bucketing - BUCKETING (7): - Bucketing - REPLACE_WITH_INFO_TYPE (8): - Replace with info type - TIME_PART (9): - Time part - CRYPTO_HASH (10): - Crypto hash - DATE_SHIFT (12): - Date shift - CRYPTO_DETERMINISTIC_CONFIG (13): - Deterministic crypto - REDACT_IMAGE (14): - Redact image - """ - TRANSFORMATION_TYPE_UNSPECIFIED = 0 - RECORD_SUPPRESSION = 1 - REPLACE_VALUE = 2 - REPLACE_DICTIONARY = 15 - REDACT = 3 - CHARACTER_MASK = 4 - CRYPTO_REPLACE_FFX_FPE = 5 - FIXED_SIZE_BUCKETING = 6 - BUCKETING = 7 - REPLACE_WITH_INFO_TYPE = 8 - TIME_PART = 9 - CRYPTO_HASH = 10 - DATE_SHIFT = 12 - CRYPTO_DETERMINISTIC_CONFIG = 13 - REDACT_IMAGE = 14 - - -class RelationalOperator(proto.Enum): - r"""Operators available for comparing the value of fields. - - Values: - RELATIONAL_OPERATOR_UNSPECIFIED (0): - Unused - EQUAL_TO (1): - Equal. Attempts to match even with - incompatible types. - NOT_EQUAL_TO (2): - Not equal to. Attempts to match even with - incompatible types. - GREATER_THAN (3): - Greater than. - LESS_THAN (4): - Less than. - GREATER_THAN_OR_EQUALS (5): - Greater than or equals. - LESS_THAN_OR_EQUALS (6): - Less than or equals. 
- EXISTS (7): - Exists - """ - RELATIONAL_OPERATOR_UNSPECIFIED = 0 - EQUAL_TO = 1 - NOT_EQUAL_TO = 2 - GREATER_THAN = 3 - LESS_THAN = 4 - GREATER_THAN_OR_EQUALS = 5 - LESS_THAN_OR_EQUALS = 6 - EXISTS = 7 - - -class MatchingType(proto.Enum): - r"""Type of the match which can be applied to different ways of - matching, like Dictionary, regular expression and intersecting - with findings of another info type. - - Values: - MATCHING_TYPE_UNSPECIFIED (0): - Invalid. - MATCHING_TYPE_FULL_MATCH (1): - Full match. - - Dictionary: join of Dictionary results matched - complete finding quote - Regex: all regex - matches fill a finding quote start to end - - Exclude info type: completely inside affecting - info types findings - MATCHING_TYPE_PARTIAL_MATCH (2): - Partial match. - - Dictionary: at least one of the tokens in the - finding matches - Regex: substring of the - finding matches - - Exclude info type: intersects with affecting - info types findings - MATCHING_TYPE_INVERSE_MATCH (3): - Inverse match. - - Dictionary: no tokens in the finding match the - dictionary - Regex: finding doesn't match the - regex - - Exclude info type: no intersection with - affecting info types findings - """ - MATCHING_TYPE_UNSPECIFIED = 0 - MATCHING_TYPE_FULL_MATCH = 1 - MATCHING_TYPE_PARTIAL_MATCH = 2 - MATCHING_TYPE_INVERSE_MATCH = 3 - - -class ContentOption(proto.Enum): - r"""Deprecated and unused. - - Values: - CONTENT_UNSPECIFIED (0): - Includes entire content of a file or a data - stream. - CONTENT_TEXT (1): - Text content within the data, excluding any - metadata. - CONTENT_IMAGE (2): - Images found in the data. - """ - CONTENT_UNSPECIFIED = 0 - CONTENT_TEXT = 1 - CONTENT_IMAGE = 2 - - -class MetadataType(proto.Enum): - r"""Type of metadata containing the finding. - - Values: - METADATATYPE_UNSPECIFIED (0): - Unused - STORAGE_METADATA (2): - General file metadata provided by Cloud - Storage. 
- """ - METADATATYPE_UNSPECIFIED = 0 - STORAGE_METADATA = 2 - - -class InfoTypeSupportedBy(proto.Enum): - r"""Parts of the APIs which use certain infoTypes. - - Values: - ENUM_TYPE_UNSPECIFIED (0): - Unused. - INSPECT (1): - Supported by the inspect operations. - RISK_ANALYSIS (2): - Supported by the risk analysis operations. - """ - ENUM_TYPE_UNSPECIFIED = 0 - INSPECT = 1 - RISK_ANALYSIS = 2 - - -class DlpJobType(proto.Enum): - r"""An enum to represent the various types of DLP jobs. - - Values: - DLP_JOB_TYPE_UNSPECIFIED (0): - Defaults to INSPECT_JOB. - INSPECT_JOB (1): - The job inspected Google Cloud for sensitive - data. - RISK_ANALYSIS_JOB (2): - The job executed a Risk Analysis computation. - """ - DLP_JOB_TYPE_UNSPECIFIED = 0 - INSPECT_JOB = 1 - RISK_ANALYSIS_JOB = 2 - - -class StoredInfoTypeState(proto.Enum): - r"""State of a StoredInfoType version. - - Values: - STORED_INFO_TYPE_STATE_UNSPECIFIED (0): - Unused - PENDING (1): - StoredInfoType version is being created. - READY (2): - StoredInfoType version is ready for use. - FAILED (3): - StoredInfoType creation failed. All relevant error messages - are returned in the ``StoredInfoTypeVersion`` message. - INVALID (4): - StoredInfoType is no longer valid because artifacts stored - in user-controlled storage were modified. To fix an invalid - StoredInfoType, use the ``UpdateStoredInfoType`` method to - create a new version. - """ - STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 - PENDING = 1 - READY = 2 - FAILED = 3 - INVALID = 4 - - -class ResourceVisibility(proto.Enum): - r"""How broadly a resource has been shared. New items may be - added over time. A higher number means more restricted. - - Values: - RESOURCE_VISIBILITY_UNSPECIFIED (0): - Unused. - RESOURCE_VISIBILITY_PUBLIC (10): - Visible to any user. - RESOURCE_VISIBILITY_RESTRICTED (20): - Visible only to specific users. 
- """ - RESOURCE_VISIBILITY_UNSPECIFIED = 0 - RESOURCE_VISIBILITY_PUBLIC = 10 - RESOURCE_VISIBILITY_RESTRICTED = 20 - - -class EncryptionStatus(proto.Enum): - r"""How a resource is encrypted. - - Values: - ENCRYPTION_STATUS_UNSPECIFIED (0): - Unused. - ENCRYPTION_GOOGLE_MANAGED (1): - Google manages server-side encryption keys on - your behalf. - ENCRYPTION_CUSTOMER_MANAGED (2): - Customer provides the key. - """ - ENCRYPTION_STATUS_UNSPECIFIED = 0 - ENCRYPTION_GOOGLE_MANAGED = 1 - ENCRYPTION_CUSTOMER_MANAGED = 2 - - -class ExcludeInfoTypes(proto.Message): - r"""List of excluded infoTypes. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - InfoType list in ExclusionRule rule drops a finding when it - overlaps or contained within with a finding of an infoType - from this list. For example, for - ``InspectionRuleSet.info_types`` containing - "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` - with "EMAIL_ADDRESS" the phone number findings are dropped - if they overlap with EMAIL_ADDRESS finding. That leads to - "555-222-2222@example.org" to generate only a single - finding, namely email address. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - - -class ExcludeByHotword(proto.Message): - r"""The rule to exclude findings based on a hotword. For record - inspection of tables, column names are considered hotwords. An - example of this is to exclude a finding if a BigQuery column - matches a specific pattern. - - Attributes: - hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression pattern defining what - qualifies as a hotword. - proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): - Range of characters within which the entire - hotword must reside. The total length of the - window cannot exceed 1000 characters. 
The - windowBefore property in proximity should be set - to 1 if the hotword needs to be included in a - column header. - """ - - hotword_regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=1, - message=storage.CustomInfoType.Regex, - ) - proximity: storage.CustomInfoType.DetectionRule.Proximity = proto.Field( - proto.MESSAGE, - number=2, - message=storage.CustomInfoType.DetectionRule.Proximity, - ) - - -class ExclusionRule(proto.Message): - r"""The rule that specifies conditions when findings of infoTypes - specified in ``InspectionRuleSet`` are removed from results. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - Dictionary which defines the rule. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression which defines the rule. - - This field is a member of `oneof`_ ``type``. - exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes): - Set of infoTypes for which findings would - affect this rule. - - This field is a member of `oneof`_ ``type``. - exclude_by_hotword (google.cloud.dlp_v2.types.ExcludeByHotword): - Drop if the hotword rule is contained in the - proximate context. For tabular data, the context - includes the column name. - - This field is a member of `oneof`_ ``type``. - matching_type (google.cloud.dlp_v2.types.MatchingType): - How the rule is applied, see MatchingType - documentation for details. 
- """ - - dictionary: storage.CustomInfoType.Dictionary = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - exclude_info_types: 'ExcludeInfoTypes' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='ExcludeInfoTypes', - ) - exclude_by_hotword: 'ExcludeByHotword' = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message='ExcludeByHotword', - ) - matching_type: 'MatchingType' = proto.Field( - proto.ENUM, - number=4, - enum='MatchingType', - ) - - -class InspectionRule(proto.Message): - r"""A single inspection rule to be applied to infoTypes, specified in - ``InspectionRuleSet``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - - This field is a member of `oneof`_ ``type``. - exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): - Exclusion rule. - - This field is a member of `oneof`_ ``type``. - """ - - hotword_rule: storage.CustomInfoType.DetectionRule.HotwordRule = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.DetectionRule.HotwordRule, - ) - exclusion_rule: 'ExclusionRule' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='ExclusionRule', - ) - - -class InspectionRuleSet(proto.Message): - r"""Rule set for modifying a set of infoTypes to alter behavior - under certain circumstances, depending on the specific details - of the rules within the set. 
- - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - List of infoTypes this rule set is applied - to. - rules (MutableSequence[google.cloud.dlp_v2.types.InspectionRule]): - Set of rules to be applied to infoTypes. The - rules are applied in order. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - rules: MutableSequence['InspectionRule'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='InspectionRule', - ) - - -class InspectConfig(proto.Message): - r"""Configuration description of the scanning process. When used with - redactContent only info_types and min_likelihood are currently used. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - Restricts what info_types to look for. The values must - correspond to InfoType values returned by ListInfoTypes or - listed at - https://cloud.google.com/dlp/docs/infotypes-reference. - - When no InfoTypes or CustomInfoTypes are specified in a - request, the system may automatically choose what detectors - to run. By default this may be all types, but may change - over time as detectors are updated. - - If you need precise control and predictability as to what - detectors are run you should specify specific InfoTypes - listed in the reference, otherwise a default list will be - used, which may change over time. - min_likelihood (google.cloud.dlp_v2.types.Likelihood): - Only returns findings equal or above this - threshold. The default is POSSIBLE. - See https://cloud.google.com/dlp/docs/likelihood - to learn more. - limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): - Configuration to control the number of findings returned. - This is not used for data profiling. - - When redacting sensitive data from images, finding limits - don't apply. They can cause unexpected or inconsistent - results, where only some data is redacted. 
Don't include - finding limits in - [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] - requests. Otherwise, Cloud DLP returns an error. - include_quote (bool): - When true, a contextual quote from the data that triggered a - finding is included in the response; see - [Finding.quote][google.privacy.dlp.v2.Finding.quote]. This - is not used for data profiling. - exclude_info_types (bool): - When true, excludes type information of the - findings. This is not used for data profiling. - custom_info_types (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType]): - CustomInfoTypes provided by the user. See - https://cloud.google.com/dlp/docs/creating-custom-infotypes - to learn more. - content_options (MutableSequence[google.cloud.dlp_v2.types.ContentOption]): - Deprecated and unused. - rule_set (MutableSequence[google.cloud.dlp_v2.types.InspectionRuleSet]): - Set of rules to apply to the findings for - this InspectConfig. Exclusion rules, contained - in the set are executed in the end, other rules - are executed in the order they are specified for - each info type. - """ - - class FindingLimits(proto.Message): - r"""Configuration to control the number of findings returned for - inspection. This is not used for de-identification or data - profiling. - - When redacting sensitive data from images, finding limits don't - apply. They can cause unexpected or inconsistent results, where only - some data is redacted. Don't include finding limits in - [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] - requests. Otherwise, Cloud DLP returns an error. - - Attributes: - max_findings_per_item (int): - Max number of findings that will be returned for each item - scanned. When set within ``InspectJobConfig``, the maximum - returned is 2000 regardless if this is set higher. When set - within ``InspectContentRequest``, this field is ignored. - max_findings_per_request (int): - Max number of findings that will be returned per - request/job. 
When set within ``InspectContentRequest``, the - maximum returned is 2000 regardless if this is set higher. - max_findings_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): - Configuration of findings limit given for - specified infoTypes. - """ - - class InfoTypeLimit(proto.Message): - r"""Max findings configuration per infoType, per content item or - long running DlpJob. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Type of information the findings limit applies to. Only one - limit per info_type should be provided. If InfoTypeLimit - does not have an info_type, the DLP API applies the limit - against all info_types that are found but not specified in - another InfoTypeLimit. - max_findings (int): - Max findings limit for the given infoType. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - max_findings: int = proto.Field( - proto.INT32, - number=2, - ) - - max_findings_per_item: int = proto.Field( - proto.INT32, - number=1, - ) - max_findings_per_request: int = proto.Field( - proto.INT32, - number=2, - ) - max_findings_per_info_type: MutableSequence['InspectConfig.FindingLimits.InfoTypeLimit'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='InspectConfig.FindingLimits.InfoTypeLimit', - ) - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - min_likelihood: storage.Likelihood = proto.Field( - proto.ENUM, - number=2, - enum=storage.Likelihood, - ) - limits: FindingLimits = proto.Field( - proto.MESSAGE, - number=3, - message=FindingLimits, - ) - include_quote: bool = proto.Field( - proto.BOOL, - number=4, - ) - exclude_info_types: bool = proto.Field( - proto.BOOL, - number=5, - ) - custom_info_types: MutableSequence[storage.CustomInfoType] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=storage.CustomInfoType, - ) - 
content_options: MutableSequence['ContentOption'] = proto.RepeatedField( - proto.ENUM, - number=8, - enum='ContentOption', - ) - rule_set: MutableSequence['InspectionRuleSet'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='InspectionRuleSet', - ) - - -class ByteContentItem(proto.Message): - r"""Container for bytes to inspect or redact. - - Attributes: - type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): - The type of data stored in the bytes string. Default will be - TEXT_UTF8. - data (bytes): - Content data to inspect or redact. - """ - class BytesType(proto.Enum): - r"""The type of data being sent for inspection. To learn more, see - `Supported file - types `__. - - Values: - BYTES_TYPE_UNSPECIFIED (0): - Unused - IMAGE (6): - Any image type. - IMAGE_JPEG (1): - jpeg - IMAGE_BMP (2): - bmp - IMAGE_PNG (3): - png - IMAGE_SVG (4): - svg - TEXT_UTF8 (5): - plain text - WORD_DOCUMENT (7): - docx, docm, dotx, dotm - PDF (8): - pdf - POWERPOINT_DOCUMENT (9): - pptx, pptm, potx, potm, pot - EXCEL_DOCUMENT (10): - xlsx, xlsm, xltx, xltm - AVRO (11): - avro - CSV (12): - csv - TSV (13): - tsv - """ - BYTES_TYPE_UNSPECIFIED = 0 - IMAGE = 6 - IMAGE_JPEG = 1 - IMAGE_BMP = 2 - IMAGE_PNG = 3 - IMAGE_SVG = 4 - TEXT_UTF8 = 5 - WORD_DOCUMENT = 7 - PDF = 8 - POWERPOINT_DOCUMENT = 9 - EXCEL_DOCUMENT = 10 - AVRO = 11 - CSV = 12 - TSV = 13 - - type_: BytesType = proto.Field( - proto.ENUM, - number=1, - enum=BytesType, - ) - data: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class ContentItem(proto.Message): - r""" - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - value (str): - String data to inspect or redact. 
- - This field is a member of `oneof`_ ``data_item``. - table (google.cloud.dlp_v2.types.Table): - Structured content for inspection. See - https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table - to learn more. - - This field is a member of `oneof`_ ``data_item``. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - Content data to inspect or redact. Replaces ``type`` and - ``data``. - - This field is a member of `oneof`_ ``data_item``. - """ - - value: str = proto.Field( - proto.STRING, - number=3, - oneof='data_item', - ) - table: 'Table' = proto.Field( - proto.MESSAGE, - number=4, - oneof='data_item', - message='Table', - ) - byte_item: 'ByteContentItem' = proto.Field( - proto.MESSAGE, - number=5, - oneof='data_item', - message='ByteContentItem', - ) - - -class Table(proto.Message): - r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request - allowed. See - https://cloud.google.com/dlp/docs/inspecting-structured-text#inspecting_a_table - to learn more. - - Attributes: - headers (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Headers of the table. - rows (MutableSequence[google.cloud.dlp_v2.types.Table.Row]): - Rows of the table. - """ - - class Row(proto.Message): - r"""Values of the row. - - Attributes: - values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Individual cells. - """ - - values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - - headers: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - rows: MutableSequence[Row] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Row, - ) - - -class InspectResult(proto.Message): - r"""All the findings for a single scanned item. - - Attributes: - findings (MutableSequence[google.cloud.dlp_v2.types.Finding]): - List of findings for an item. 
- findings_truncated (bool): - If true, then this item might have more - findings than were returned, and the findings - returned are an arbitrary subset of all - findings. The findings list might be truncated - because the input items were too large, or - because the server reached the maximum amount of - resources allowed for a single API call. For - best results, divide the input into smaller - batches. - """ - - findings: MutableSequence['Finding'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Finding', - ) - findings_truncated: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class Finding(proto.Message): - r"""Represents a piece of potentially sensitive content. - - Attributes: - name (str): - Resource name in format - projects/{project}/locations/{location}/findings/{finding} - Populated only when viewing persisted findings. - quote (str): - The content that was found. Even if the content is not - textual, it may be converted to a textual representation - here. Provided if ``include_quote`` is true and the finding - is less than or equal to 4096 bytes long. If the finding - exceeds 4096 bytes in length, the quote may be omitted. - info_type (google.cloud.dlp_v2.types.InfoType): - The type of content that might have been found. Provided if - ``excluded_types`` is false. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Confidence of how likely it is that the ``info_type`` is - correct. - location (google.cloud.dlp_v2.types.Location): - Where the content was found. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when finding was detected. - quote_info (google.cloud.dlp_v2.types.QuoteInfo): - Contains data parsed from quotes. Only populated if - include_quote was set to true and a supported infoType was - requested. Currently supported infoTypes: DATE, - DATE_OF_BIRTH and TIME. - resource_name (str): - The job that stored the finding. - trigger_name (str): - Job trigger name, if applicable, for this - finding. 
- labels (MutableMapping[str, str]): - The labels associated with this ``Finding``. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - job_create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the job started that produced this - finding. - job_name (str): - The job that stored the finding. - finding_id (str): - The unique finding id. - """ - - name: str = proto.Field( - proto.STRING, - number=14, - ) - quote: str = proto.Field( - proto.STRING, - number=1, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - likelihood: storage.Likelihood = proto.Field( - proto.ENUM, - number=3, - enum=storage.Likelihood, - ) - location: 'Location' = proto.Field( - proto.MESSAGE, - number=4, - message='Location', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - quote_info: 'QuoteInfo' = proto.Field( - proto.MESSAGE, - number=7, - message='QuoteInfo', - ) - resource_name: str = proto.Field( - proto.STRING, - number=8, - ) - trigger_name: str = proto.Field( - proto.STRING, - number=9, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - job_create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - job_name: str = proto.Field( - proto.STRING, - number=13, - ) - finding_id: str = proto.Field( - proto.STRING, - number=15, - ) - - -class Location(proto.Message): - r"""Specifies the location of the finding. 
- - Attributes: - byte_range (google.cloud.dlp_v2.types.Range): - Zero-based byte offsets delimiting the - finding. These are relative to the finding's - containing element. Note that when the content - is not textual, this references the UTF-8 - encoded textual representation of the content. - Omitted if content is an image. - codepoint_range (google.cloud.dlp_v2.types.Range): - Unicode character offsets delimiting the - finding. These are relative to the finding's - containing element. Provided when the content is - text. - content_locations (MutableSequence[google.cloud.dlp_v2.types.ContentLocation]): - List of nested objects pointing to the - precise location of the finding within the file - or record. - container (google.cloud.dlp_v2.types.Container): - Information about the container where this - finding occurred, if available. - """ - - byte_range: 'Range' = proto.Field( - proto.MESSAGE, - number=1, - message='Range', - ) - codepoint_range: 'Range' = proto.Field( - proto.MESSAGE, - number=2, - message='Range', - ) - content_locations: MutableSequence['ContentLocation'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ContentLocation', - ) - container: 'Container' = proto.Field( - proto.MESSAGE, - number=8, - message='Container', - ) - - -class ContentLocation(proto.Message): - r"""Precise location of the finding within a document, record, - image, or metadata container. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - container_name (str): - Name of the container where the finding is located. The top - level name is the source file name or table name. 
Names of - some common storage containers are formatted as follows: - - - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` - - Cloud Storage files: ``gs://{bucket}/{path}`` - - Datastore namespace: {namespace} - - Nested names could be absent if the embedded object has no - string identifier (for example, an image contained within a - document). - record_location (google.cloud.dlp_v2.types.RecordLocation): - Location within a row or record of a database - table. - - This field is a member of `oneof`_ ``location``. - image_location (google.cloud.dlp_v2.types.ImageLocation): - Location within an image's pixels. - - This field is a member of `oneof`_ ``location``. - document_location (google.cloud.dlp_v2.types.DocumentLocation): - Location data for document files. - - This field is a member of `oneof`_ ``location``. - metadata_location (google.cloud.dlp_v2.types.MetadataLocation): - Location within the metadata for inspected - content. - - This field is a member of `oneof`_ ``location``. - container_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Finding container modification timestamp, if applicable. For - Cloud Storage, this field contains the last file - modification timestamp. For a BigQuery table, this field - contains the last_modified_time property. For Datastore, - this field isn't populated. - container_version (str): - Finding container version, if available - ("generation" for Cloud Storage). 
- """ - - container_name: str = proto.Field( - proto.STRING, - number=1, - ) - record_location: 'RecordLocation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location', - message='RecordLocation', - ) - image_location: 'ImageLocation' = proto.Field( - proto.MESSAGE, - number=3, - oneof='location', - message='ImageLocation', - ) - document_location: 'DocumentLocation' = proto.Field( - proto.MESSAGE, - number=5, - oneof='location', - message='DocumentLocation', - ) - metadata_location: 'MetadataLocation' = proto.Field( - proto.MESSAGE, - number=8, - oneof='location', - message='MetadataLocation', - ) - container_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - container_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class MetadataLocation(proto.Message): - r"""Metadata Location - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.dlp_v2.types.MetadataType): - Type of metadata containing the finding. - storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): - Storage metadata. - - This field is a member of `oneof`_ ``label``. - """ - - type_: 'MetadataType' = proto.Field( - proto.ENUM, - number=1, - enum='MetadataType', - ) - storage_label: 'StorageMetadataLabel' = proto.Field( - proto.MESSAGE, - number=3, - oneof='label', - message='StorageMetadataLabel', - ) - - -class StorageMetadataLabel(proto.Message): - r"""Storage metadata label to indicate which metadata entry - contains findings. - - Attributes: - key (str): - - """ - - key: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DocumentLocation(proto.Message): - r"""Location of a finding within a document. - - Attributes: - file_offset (int): - Offset of the line, from the beginning of the - file, where the finding is located. 
- """ - - file_offset: int = proto.Field( - proto.INT64, - number=1, - ) - - -class RecordLocation(proto.Message): - r"""Location of a finding within a row or record. - - Attributes: - record_key (google.cloud.dlp_v2.types.RecordKey): - Key of the finding. - field_id (google.cloud.dlp_v2.types.FieldId): - Field id of the field containing the finding. - table_location (google.cloud.dlp_v2.types.TableLocation): - Location within a ``ContentItem.Table``. - """ - - record_key: storage.RecordKey = proto.Field( - proto.MESSAGE, - number=1, - message=storage.RecordKey, - ) - field_id: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - table_location: 'TableLocation' = proto.Field( - proto.MESSAGE, - number=3, - message='TableLocation', - ) - - -class TableLocation(proto.Message): - r"""Location of a finding within a table. - - Attributes: - row_index (int): - The zero-based index of the row where the finding is - located. Only populated for resources that have a natural - ordering, not BigQuery. In BigQuery, to identify the row a - finding came from, populate - BigQueryOptions.identifying_fields with your primary key - column names and when you store the findings the value of - those columns will be stored inside of Finding. - """ - - row_index: int = proto.Field( - proto.INT64, - number=1, - ) - - -class Container(proto.Message): - r"""Represents a container that may contain DLP findings. - Examples of a container include a file, table, or database - record. - - Attributes: - type_ (str): - Container type, for example BigQuery or Cloud - Storage. - project_id (str): - Project where the finding was found. - Can be different from the project that owns the - finding. - full_path (str): - A string representation of the full container - name. Examples: - - BigQuery: 'Project:DataSetId.TableId' - - Cloud Storage: - 'gs://Bucket/folders/filename.txt' - root_path (str): - The root of the container. 
Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the root is ``dataset_id`` - - For Cloud Storage file - ``gs://bucket/folder/filename.txt``, the root is - ``gs://bucket`` - relative_path (str): - The rest of the path after the root. Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the relative path is ``table_id`` - - For Cloud Storage file - ``gs://bucket/folder/filename.txt``, the relative path is - ``folder/filename.txt`` - update_time (google.protobuf.timestamp_pb2.Timestamp): - Findings container modification timestamp, if applicable. - For Cloud Storage, this field contains the last file - modification timestamp. For a BigQuery table, this field - contains the last_modified_time property. For Datastore, - this field isn't populated. - version (str): - Findings container version, if available - ("generation" for Cloud Storage). - """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - full_path: str = proto.Field( - proto.STRING, - number=3, - ) - root_path: str = proto.Field( - proto.STRING, - number=4, - ) - relative_path: str = proto.Field( - proto.STRING, - number=5, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class Range(proto.Message): - r"""Generic half-open interval [start, end) - - Attributes: - start (int): - Index of the first character of the range - (inclusive). - end (int): - Index of the last character of the range - (exclusive). - """ - - start: int = proto.Field( - proto.INT64, - number=1, - ) - end: int = proto.Field( - proto.INT64, - number=2, - ) - - -class ImageLocation(proto.Message): - r"""Location of the finding within an image. 
- - Attributes: - bounding_boxes (MutableSequence[google.cloud.dlp_v2.types.BoundingBox]): - Bounding boxes locating the pixels within the - image containing the finding. - """ - - bounding_boxes: MutableSequence['BoundingBox'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BoundingBox', - ) - - -class BoundingBox(proto.Message): - r"""Bounding box encompassing detected text within an image. - - Attributes: - top (int): - Top coordinate of the bounding box. (0,0) is - upper left. - left (int): - Left coordinate of the bounding box. (0,0) is - upper left. - width (int): - Width of the bounding box in pixels. - height (int): - Height of the bounding box in pixels. - """ - - top: int = proto.Field( - proto.INT32, - number=1, - ) - left: int = proto.Field( - proto.INT32, - number=2, - ) - width: int = proto.Field( - proto.INT32, - number=3, - ) - height: int = proto.Field( - proto.INT32, - number=4, - ) - - -class RedactImageRequest(proto.Message): - r"""Request to search for potentially sensitive info in an image - and redact it by covering it with a colored rectangle. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - location_id (str): - Deprecated. This field has no effect. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. 
- image_redaction_configs (MutableSequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): - The configuration for specifying what content - to redact from images. - include_findings (bool): - Whether the response should include findings - along with the redacted image. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - The content must be PNG, JPEG, SVG or BMP. - """ - - class ImageRedactionConfig(proto.Message): - r"""Configuration for determining how redaction of images should - occur. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Only one per info_type should be provided per request. If - not specified, and redact_all_text is false, the DLP API - will redact all text that it matches against all info_types - that are found, but not specified in another - ImageRedactionConfig. - - This field is a member of `oneof`_ ``target``. - redact_all_text (bool): - If true, all text found in the image, regardless whether it - matches an info_type, is redacted. Only one should be - provided. - - This field is a member of `oneof`_ ``target``. - redaction_color (google.cloud.dlp_v2.types.Color): - The color to use when redacting content from - an image. If not specified, the default is - black. 
- """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - oneof='target', - message=storage.InfoType, - ) - redact_all_text: bool = proto.Field( - proto.BOOL, - number=2, - oneof='target', - ) - redaction_color: 'Color' = proto.Field( - proto.MESSAGE, - number=3, - message='Color', - ) - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - location_id: str = proto.Field( - proto.STRING, - number=8, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - image_redaction_configs: MutableSequence[ImageRedactionConfig] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=ImageRedactionConfig, - ) - include_findings: bool = proto.Field( - proto.BOOL, - number=6, - ) - byte_item: 'ByteContentItem' = proto.Field( - proto.MESSAGE, - number=7, - message='ByteContentItem', - ) - - -class Color(proto.Message): - r"""Represents a color in the RGB color space. - - Attributes: - red (float): - The amount of red in the color as a value in the interval - [0, 1]. - green (float): - The amount of green in the color as a value in the interval - [0, 1]. - blue (float): - The amount of blue in the color as a value in the interval - [0, 1]. - """ - - red: float = proto.Field( - proto.FLOAT, - number=1, - ) - green: float = proto.Field( - proto.FLOAT, - number=2, - ) - blue: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class RedactImageResponse(proto.Message): - r"""Results of redacting an image. - - Attributes: - redacted_image (bytes): - The redacted image. The type will be the same - as the original image. - extracted_text (str): - If an image was being inspected and the InspectConfig's - include_quote was set to true, then this field will include - all text, if any, that was found in the image. - inspect_result (google.cloud.dlp_v2.types.InspectResult): - The findings. Populated when include_findings in the request - is true. 
- """ - - redacted_image: bytes = proto.Field( - proto.BYTES, - number=1, - ) - extracted_text: str = proto.Field( - proto.STRING, - number=2, - ) - inspect_result: 'InspectResult' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectResult', - ) - - -class DeidentifyContentRequest(proto.Message): - r"""Request to de-identify a ContentItem. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the de-identification of the content item. - Items specified here will override the template referenced - by the deidentify_template_name argument. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. Items specified here will - override the template referenced by the - inspect_template_name argument. - item (google.cloud.dlp_v2.types.ContentItem): - The item to de-identify. Will be treated as text. - - This value must be of type - [Table][google.privacy.dlp.v2.Table] if your - [deidentify_config][google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config] - is a - [RecordTransformations][google.privacy.dlp.v2.RecordTransformations] - object. - inspect_template_name (str): - Template to use. Any configuration directly specified in - inspect_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. 
Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - deidentify_template_name (str): - Template to use. Any configuration directly specified in - deidentify_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyConfig', - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=4, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=5, - ) - deidentify_template_name: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class DeidentifyContentResponse(proto.Message): - r"""Results of de-identifying a ContentItem. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The de-identified item. - overview (google.cloud.dlp_v2.types.TransformationOverview): - An overview of the changes that were made on the ``item``. - """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - overview: 'TransformationOverview' = proto.Field( - proto.MESSAGE, - number=2, - message='TransformationOverview', - ) - - -class ReidentifyContentRequest(proto.Message): - r"""Request to re-identify an item. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the re-identification of the content item. - This field shares the same proto message type that is used - for de-identification, however its usage here is for the - reversal of the previous de-identification. - Re-identification is performed by examining the - transformations used to de-identify the items and executing - the reverse. This requires that only reversible - transformations be provided here. The reversible - transformations are: - - - ``CryptoDeterministicConfig`` - - ``CryptoReplaceFfxFpeConfig`` - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. - item (google.cloud.dlp_v2.types.ContentItem): - The item to re-identify. Will be treated as - text. - inspect_template_name (str): - Template to use. Any configuration directly specified in - ``inspect_config`` will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - reidentify_template_name (str): - Template to use. References an instance of - ``DeidentifyTemplate``. Any configuration directly specified - in ``reidentify_config`` or ``inspect_config`` will override - those set in the template. 
The ``DeidentifyTemplate`` used - must include only reversible transformations. Singular - fields that are set in this request will replace their - corresponding fields in the template. Repeated fields are - appended. Singular sub-messages and groups are recursively - merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - reidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyConfig', - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=4, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=5, - ) - reidentify_template_name: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ReidentifyContentResponse(proto.Message): - r"""Results of re-identifying an item. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The re-identified item. - overview (google.cloud.dlp_v2.types.TransformationOverview): - An overview of the changes that were made to the ``item``. - """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - overview: 'TransformationOverview' = proto.Field( - proto.MESSAGE, - number=2, - message='TransformationOverview', - ) - - -class InspectContentRequest(proto.Message): - r"""Request to search for potentially sensitive info in a - ContentItem. - - Attributes: - parent (str): - Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. What specified here will - override the template referenced by the - inspect_template_name argument. - item (google.cloud.dlp_v2.types.ContentItem): - The item to inspect. - inspect_template_name (str): - Template to use. Any configuration directly specified in - inspect_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class InspectContentResponse(proto.Message): - r"""Results of inspecting an item. - - Attributes: - result (google.cloud.dlp_v2.types.InspectResult): - The findings. 
- """ - - result: 'InspectResult' = proto.Field( - proto.MESSAGE, - number=1, - message='InspectResult', - ) - - -class OutputStorageConfig(proto.Message): - r"""Cloud repository for storing output. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Store findings in an existing table or a new table in an - existing dataset. If table_id is not set a new one will be - generated for you with the following format: - dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific time zone - will be used for generating the date details. - - For Inspect, each column in an existing output table must - have the same name, type, and mode of a field in the - ``Finding`` object. - - For Risk, an existing output table should be the output of a - previous Risk analysis job run on the same source table, - with the same privacy metric and quasi-identifiers. Risk - jobs that analyze the same table but compute a different - privacy metric, or use different sets of quasi-identifiers, - cannot store their results in the same table. - - This field is a member of `oneof`_ ``type``. - output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): - Schema used for writing the findings for Inspect jobs. This - field is only used for Inspect and must be unspecified for - Risk jobs. Columns are derived from the ``Finding`` object. - If appending to an existing table, any columns from the - predefined schema that are missing will be added. No columns - in the existing table will be deleted. - - If unspecified, then all available columns will be used for - a new table or an (existing) table with no schema, and no - changes will be made to an existing table that has a schema. - Only for use with external storage. - """ - class OutputSchema(proto.Enum): - r"""Predefined schemas for storing findings. - Only for use with external storage. 
- - Values: - OUTPUT_SCHEMA_UNSPECIFIED (0): - Unused. - BASIC_COLUMNS (1): - Basic schema including only ``info_type``, ``quote``, - ``certainty``, and ``timestamp``. - GCS_COLUMNS (2): - Schema tailored to findings from scanning - Cloud Storage. - DATASTORE_COLUMNS (3): - Schema tailored to findings from scanning - Google Datastore. - BIG_QUERY_COLUMNS (4): - Schema tailored to findings from scanning - Google BigQuery. - ALL_COLUMNS (5): - Schema containing all columns. - """ - OUTPUT_SCHEMA_UNSPECIFIED = 0 - BASIC_COLUMNS = 1 - GCS_COLUMNS = 2 - DATASTORE_COLUMNS = 3 - BIG_QUERY_COLUMNS = 4 - ALL_COLUMNS = 5 - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.BigQueryTable, - ) - output_schema: OutputSchema = proto.Field( - proto.ENUM, - number=3, - enum=OutputSchema, - ) - - -class InfoTypeStats(proto.Message): - r"""Statistics regarding a specific InfoType. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The type of finding this stat is for. - count (int): - Number of findings for this infoType. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - -class InspectDataSourceDetails(proto.Message): - r"""The results of an inspect DataSource job. - - Attributes: - requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): - The configuration used for this job. - result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): - A summary of the outcome of this inspection - job. - """ - - class RequestedOptions(proto.Message): - r"""Snapshot of the inspection configuration. - - Attributes: - snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - If run with an InspectTemplate, a snapshot of - its state at the time of this run. - job_config (google.cloud.dlp_v2.types.InspectJobConfig): - Inspect config. 
- """ - - snapshot_inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - job_config: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectJobConfig', - ) - - class Result(proto.Message): - r"""All result fields mentioned below are updated while the job - is processing. - - Attributes: - processed_bytes (int): - Total size in bytes that were processed. - total_estimated_bytes (int): - Estimate of the number of bytes to process. - info_type_stats (MutableSequence[google.cloud.dlp_v2.types.InfoTypeStats]): - Statistics of how many instances of each info - type were found during inspect job. - hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): - Statistics related to the processing of - hybrid inspect. - """ - - processed_bytes: int = proto.Field( - proto.INT64, - number=1, - ) - total_estimated_bytes: int = proto.Field( - proto.INT64, - number=2, - ) - info_type_stats: MutableSequence['InfoTypeStats'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='InfoTypeStats', - ) - hybrid_stats: 'HybridInspectStatistics' = proto.Field( - proto.MESSAGE, - number=7, - message='HybridInspectStatistics', - ) - - requested_options: RequestedOptions = proto.Field( - proto.MESSAGE, - number=2, - message=RequestedOptions, - ) - result: Result = proto.Field( - proto.MESSAGE, - number=3, - message=Result, - ) - - -class HybridInspectStatistics(proto.Message): - r"""Statistics related to processing hybrid inspect requests. - - Attributes: - processed_count (int): - The number of hybrid inspection requests - processed within this job. - aborted_count (int): - The number of hybrid inspection requests - aborted because the job ran out of quota or was - ended before they could be processed. - pending_count (int): - The number of hybrid requests currently being processed. - Only populated when called via method ``getDlpJob``. 
A burst - of traffic may cause hybrid inspect requests to be enqueued. - Processing will take place as quickly as possible, but - resource limitations may impact how long a request is - enqueued for. - """ - - processed_count: int = proto.Field( - proto.INT64, - number=1, - ) - aborted_count: int = proto.Field( - proto.INT64, - number=2, - ) - pending_count: int = proto.Field( - proto.INT64, - number=3, - ) - - -class InfoTypeDescription(proto.Message): - r"""InfoType description. - - Attributes: - name (str): - Internal name of the infoType. - display_name (str): - Human readable form of the infoType name. - supported_by (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): - Which parts of the API supports this - InfoType. - description (str): - Description of the infotype. Translated when - language is provided in the request. - versions (MutableSequence[google.cloud.dlp_v2.types.VersionDescription]): - A list of available versions for the - infotype. - categories (MutableSequence[google.cloud.dlp_v2.types.InfoTypeCategory]): - The category of the infoType. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - supported_by: MutableSequence['InfoTypeSupportedBy'] = proto.RepeatedField( - proto.ENUM, - number=3, - enum='InfoTypeSupportedBy', - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - versions: MutableSequence['VersionDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='VersionDescription', - ) - categories: MutableSequence['InfoTypeCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='InfoTypeCategory', - ) - - -class InfoTypeCategory(proto.Message): - r"""Classification of infoTypes to organize them according to - geographic location, industry, and data type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - location_category (google.cloud.dlp_v2.types.InfoTypeCategory.LocationCategory): - The region or country that issued the ID or - document represented by the infoType. - - This field is a member of `oneof`_ ``category``. - industry_category (google.cloud.dlp_v2.types.InfoTypeCategory.IndustryCategory): - The group of relevant businesses where this - infoType is commonly used - - This field is a member of `oneof`_ ``category``. - type_category (google.cloud.dlp_v2.types.InfoTypeCategory.TypeCategory): - The class of identifiers where this infoType - belongs - - This field is a member of `oneof`_ ``category``. - """ - class LocationCategory(proto.Enum): - r"""Enum of the current locations. - We might add more locations in the future. - - Values: - LOCATION_UNSPECIFIED (0): - Unused location - GLOBAL (1): - The infoType is not issued by or tied to a - specific region, but is used almost everywhere. - ARGENTINA (2): - The infoType is typically used in Argentina. - AUSTRALIA (3): - The infoType is typically used in Australia. - BELGIUM (4): - The infoType is typically used in Belgium. - BRAZIL (5): - The infoType is typically used in Brazil. - CANADA (6): - The infoType is typically used in Canada. - CHILE (7): - The infoType is typically used in Chile. - CHINA (8): - The infoType is typically used in China. - COLOMBIA (9): - The infoType is typically used in Colombia. - DENMARK (10): - The infoType is typically used in Denmark. - FRANCE (11): - The infoType is typically used in France. - FINLAND (12): - The infoType is typically used in Finland. - GERMANY (13): - The infoType is typically used in Germany. - HONG_KONG (14): - The infoType is typically used in Hong Kong. - INDIA (15): - The infoType is typically used in India. 
- INDONESIA (16): - The infoType is typically used in Indonesia. - IRELAND (17): - The infoType is typically used in Ireland. - ISRAEL (18): - The infoType is typically used in Israel. - ITALY (19): - The infoType is typically used in Italy. - JAPAN (20): - The infoType is typically used in Japan. - KOREA (21): - The infoType is typically used in Korea. - MEXICO (22): - The infoType is typically used in Mexico. - THE_NETHERLANDS (23): - The infoType is typically used in the - Netherlands. - NORWAY (24): - The infoType is typically used in Norway. - PARAGUAY (25): - The infoType is typically used in Paraguay. - PERU (26): - The infoType is typically used in Peru. - POLAND (27): - The infoType is typically used in Poland. - PORTUGAL (28): - The infoType is typically used in Portugal. - SINGAPORE (29): - The infoType is typically used in Singapore. - SOUTH_AFRICA (30): - The infoType is typically used in South - Africa. - SPAIN (31): - The infoType is typically used in Spain. - SWEDEN (32): - The infoType is typically used in Sweden. - TAIWAN (33): - The infoType is typically used in Taiwan. - THAILAND (34): - The infoType is typically used in Thailand. - TURKEY (35): - The infoType is typically used in Turkey. - UNITED_KINGDOM (36): - The infoType is typically used in the United - Kingdom. - UNITED_STATES (37): - The infoType is typically used in the United - States. - URUGUAY (38): - The infoType is typically used in Uruguay. - VENEZUELA (39): - The infoType is typically used in Venezuela. - INTERNAL (40): - The infoType is typically used in Google - internally. - NEW_ZEALAND (41): - The infoType is typically used in New - Zealand. 
- """ - LOCATION_UNSPECIFIED = 0 - GLOBAL = 1 - ARGENTINA = 2 - AUSTRALIA = 3 - BELGIUM = 4 - BRAZIL = 5 - CANADA = 6 - CHILE = 7 - CHINA = 8 - COLOMBIA = 9 - DENMARK = 10 - FRANCE = 11 - FINLAND = 12 - GERMANY = 13 - HONG_KONG = 14 - INDIA = 15 - INDONESIA = 16 - IRELAND = 17 - ISRAEL = 18 - ITALY = 19 - JAPAN = 20 - KOREA = 21 - MEXICO = 22 - THE_NETHERLANDS = 23 - NORWAY = 24 - PARAGUAY = 25 - PERU = 26 - POLAND = 27 - PORTUGAL = 28 - SINGAPORE = 29 - SOUTH_AFRICA = 30 - SPAIN = 31 - SWEDEN = 32 - TAIWAN = 33 - THAILAND = 34 - TURKEY = 35 - UNITED_KINGDOM = 36 - UNITED_STATES = 37 - URUGUAY = 38 - VENEZUELA = 39 - INTERNAL = 40 - NEW_ZEALAND = 41 - - class IndustryCategory(proto.Enum): - r"""Enum of the current industries in the category. - We might add more industries in the future. - - Values: - INDUSTRY_UNSPECIFIED (0): - Unused industry - FINANCE (1): - The infoType is typically used in the finance - industry. - HEALTH (2): - The infoType is typically used in the health - industry. - TELECOMMUNICATIONS (3): - The infoType is typically used in the - telecommunications industry. - """ - INDUSTRY_UNSPECIFIED = 0 - FINANCE = 1 - HEALTH = 2 - TELECOMMUNICATIONS = 3 - - class TypeCategory(proto.Enum): - r"""Enum of the current types in the category. - We might add more types in the future. - - Values: - TYPE_UNSPECIFIED (0): - Unused type - PII (1): - Personally identifiable information, for - example, a name or phone number - SPII (2): - Personally identifiable information that is - especially sensitive, for example, a passport - number. - DEMOGRAPHIC (3): - Attributes that can partially identify - someone, especially in combination with other - attributes, like age, height, and gender. - CREDENTIAL (4): - Confidential or secret information, for - example, a password. - GOVERNMENT_ID (5): - An identification document issued by a - government. - DOCUMENT (6): - A document, for example, a resume or source - code. 
- CONTEXTUAL_INFORMATION (7): - Information that is not sensitive on its own, - but provides details about the circumstances - surrounding an entity or an event. - """ - TYPE_UNSPECIFIED = 0 - PII = 1 - SPII = 2 - DEMOGRAPHIC = 3 - CREDENTIAL = 4 - GOVERNMENT_ID = 5 - DOCUMENT = 6 - CONTEXTUAL_INFORMATION = 7 - - location_category: LocationCategory = proto.Field( - proto.ENUM, - number=1, - oneof='category', - enum=LocationCategory, - ) - industry_category: IndustryCategory = proto.Field( - proto.ENUM, - number=2, - oneof='category', - enum=IndustryCategory, - ) - type_category: TypeCategory = proto.Field( - proto.ENUM, - number=3, - oneof='category', - enum=TypeCategory, - ) - - -class VersionDescription(proto.Message): - r"""Details about each available version for an infotype. - - Attributes: - version (str): - Name of the version - description (str): - Description of the version. - """ - - version: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListInfoTypesRequest(proto.Message): - r"""Request for the list of infoTypes. - - Attributes: - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - language_code (str): - BCP-47 language code for localized infoType - friendly names. If omitted, or if localized - strings are not available, en-US strings will be - returned. - filter (str): - filter to only return infoTypes supported by certain parts - of the API. Defaults to supported_by=INSPECT. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - language_code: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - location_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListInfoTypesResponse(proto.Message): - r"""Response to the ListInfoTypes request. 
- - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeDescription]): - Set of sensitive infoTypes. - """ - - info_types: MutableSequence['InfoTypeDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InfoTypeDescription', - ) - - -class RiskAnalysisJobConfig(proto.Message): - r"""Configuration for a risk analysis job. See - https://cloud.google.com/dlp/docs/concepts-risk-analysis to - learn more. - - Attributes: - privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - actions (MutableSequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. Are executed in the order provided. - """ - - privacy_metric: 'PrivacyMetric' = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - source_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - actions: MutableSequence['Action'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Action', - ) - - -class QuasiId(proto.Message): - r"""A column with a semantic tag attached. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. - info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. 
To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - - This field is a member of `oneof`_ ``tag``. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - - This field is a member of `oneof`_ ``tag``. - inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - - This field is a member of `oneof`_ ``tag``. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred: empty_pb2.Empty = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - -class StatisticalTable(proto.Message): - r"""An auxiliary table containing statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. - If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). 
Null values are assumed to be zero. - """ - - class QuasiIdentifierField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=2, - ) - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids: MutableSequence[QuasiIdentifierField] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=QuasiIdentifierField, - ) - relative_frequency: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - -class PrivacyMetric(proto.Message): - r"""Privacy metric to compute for reidentification risk analysis. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): - Numerical stats - - This field is a member of `oneof`_ ``type``. - categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig): - Categorical stats - - This field is a member of `oneof`_ ``type``. - k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig): - K-anonymity - - This field is a member of `oneof`_ ``type``. 
- l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig): - l-diversity - - This field is a member of `oneof`_ ``type``. - k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig): - k-map - - This field is a member of `oneof`_ ``type``. - delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig): - delta-presence - - This field is a member of `oneof`_ ``type``. - """ - - class NumericalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - min, max, and quantiles. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute numerical stats on. - Supported types are integer, float, date, - datetime, timestamp, time. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class CategoricalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - number of distinct values and value count distribution. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute categorical stats on. All - column types are supported except for arrays and - structs. However, it may be more informative to - use NumericalStats when the field type is - supported, depending on the data. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class KAnonymityConfig(proto.Message): - r"""k-anonymity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Set of fields to compute k-anonymity over. - When multiple fields are specified, they are - considered a single composite key. Structs and - repeated data types are not supported; however, - nested fields are supported so long as they are - not structs themselves or nested within a - repeated field. 
- entity_id (google.cloud.dlp_v2.types.EntityId): - Message indicating that multiple rows might be associated to - a single individual. If the same entity_id is associated to - multiple quasi-identifier tuples over distinct rows, we - consider the entire collection of tuples as the composite - quasi-identifier. This collection is a multiset: the order - in which the different tuples appear in the dataset is - ignored, but their frequency is taken into account. - - Important note: a maximum of 1000 rows can be associated to - a single entity ID. If more rows are associated with the - same entity ID, some might be ignored. - """ - - quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - entity_id: storage.EntityId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.EntityId, - ) - - class LDiversityConfig(proto.Message): - r"""l-diversity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Set of quasi-identifiers indicating how - equivalence classes are defined for the - l-diversity computation. When multiple fields - are specified, they are considered a single - composite key. - sensitive_attribute (google.cloud.dlp_v2.types.FieldId): - Sensitive field for computing the l-value. - """ - - quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - sensitive_attribute: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - class KMapEstimationConfig(proto.Message): - r"""Reidentifiability metric. This corresponds to a risk model - similar to what is called "journalist risk" in the literature, - except the attack dataset is statistically modeled instead of - being perfectly known. 
This can be done using publicly available - data (like the US Census), or using a custom statistical model - (indicated as one or several BigQuery tables), or by - extrapolating from the distribution of values in the input - dataset. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): - Required. Fields considered to be - quasi-identifiers. No two columns can have the - same tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. - auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): - Several auxiliary tables can be used in the analysis. Each - custom_tag used to tag a quasi-identifiers column must - appear in exactly one column of one auxiliary table. - """ - - class TaggedField(proto.Message): - r"""A column with a semantic tag attached. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. - info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - - This field is a member of `oneof`_ ``tag``. - custom_tag (str): - A column can be tagged with a custom tag. 
In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - - This field is a member of `oneof`_ ``tag``. - inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - - This field is a member of `oneof`_ ``tag``. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred: empty_pb2.Empty = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - class AuxiliaryTable(proto.Message): - r"""An auxiliary table contains statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. - If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). Null values are assumed to be zero. - """ - - class QuasiIdField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. 
- - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A auxiliary field. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=2, - ) - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField', - ) - relative_frequency: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.TaggedField'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.TaggedField', - ) - region_code: str = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable', - ) - - class DeltaPresenceEstimationConfig(proto.Message): - r"""δ-presence metric, used to estimate how likely it is for an - attacker to figure out that one given individual appears in a - de-identified dataset. Similarly to the k-map metric, we cannot - compute δ-presence exactly without knowing the attack dataset, - so we use a statistical model instead. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.QuasiId]): - Required. Fields considered to be - quasi-identifiers. No two fields can have the - same tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. 
- auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable]): - Several auxiliary tables can be used in the analysis. Each - custom_tag used to tag a quasi-identifiers field must appear - in exactly one field of one auxiliary table. - """ - - quasi_ids: MutableSequence['QuasiId'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='QuasiId', - ) - region_code: str = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables: MutableSequence['StatisticalTable'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StatisticalTable', - ) - - numerical_stats_config: NumericalStatsConfig = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=NumericalStatsConfig, - ) - categorical_stats_config: CategoricalStatsConfig = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=CategoricalStatsConfig, - ) - k_anonymity_config: KAnonymityConfig = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=KAnonymityConfig, - ) - l_diversity_config: LDiversityConfig = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=LDiversityConfig, - ) - k_map_estimation_config: KMapEstimationConfig = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=KMapEstimationConfig, - ) - delta_presence_estimation_config: DeltaPresenceEstimationConfig = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=DeltaPresenceEstimationConfig, - ) - - -class AnalyzeDataSourceRiskDetails(proto.Message): - r"""Result of a risk analysis operation request. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - requested_source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult): - Numerical stats result - - This field is a member of `oneof`_ ``result``. - categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult): - Categorical stats result - - This field is a member of `oneof`_ ``result``. - k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult): - K-anonymity result - - This field is a member of `oneof`_ ``result``. - l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult): - L-divesity result - - This field is a member of `oneof`_ ``result``. - k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult): - K-map result - - This field is a member of `oneof`_ ``result``. - delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): - Delta-presence result - - This field is a member of `oneof`_ ``result``. - requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions): - The configuration used for this job. - """ - - class NumericalStatsResult(proto.Message): - r"""Result of the numerical stats computation. - - Attributes: - min_value (google.cloud.dlp_v2.types.Value): - Minimum value appearing in the column. - max_value (google.cloud.dlp_v2.types.Value): - Maximum value appearing in the column. - quantile_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - List of 99 values that partition the set of - field values into 100 equal sized buckets. 
- """ - - min_value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_value: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - quantile_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Value', - ) - - class CategoricalStatsResult(proto.Message): - r"""Result of the categorical stats computation. - - Attributes: - value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): - Histogram of value frequencies in the column. - """ - - class CategoricalStatsHistogramBucket(proto.Message): - r"""Histogram of value frequencies in the column. - - Attributes: - value_frequency_lower_bound (int): - Lower bound on the value frequency of the - values in this bucket. - value_frequency_upper_bound (int): - Upper bound on the value frequency of the - values in this bucket. - bucket_size (int): - Total number of values in this bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): - Sample of value frequencies in this bucket. - The total number of values returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct values in this - bucket. 
- """ - - value_frequency_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - value_frequency_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket', - ) - - class KAnonymityResult(proto.Message): - r"""Result of the k-anonymity computation. - - Attributes: - equivalence_class_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): - Histogram of k-anonymity equivalence classes. - """ - - class KAnonymityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Set of values defining the equivalence class. - One value per quasi-identifier column in the - original KAnonymity metric message. The order is - always the same as the original request. - equivalence_class_size (int): - Size of the equivalence class, for example - number of rows with the above set of values. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size: int = proto.Field( - proto.INT64, - number=2, - ) - - class KAnonymityHistogramBucket(proto.Message): - r"""Histogram of k-anonymity equivalence classes. 
- - Attributes: - equivalence_class_size_lower_bound (int): - Lower bound on the size of the equivalence - classes in this bucket. - equivalence_class_size_upper_bound (int): - Upper bound on the size of the equivalence - classes in this bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. - """ - - equivalence_class_size_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - equivalence_class_size_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - equivalence_class_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', - ) - - class LDiversityResult(proto.Message): - r"""Result of the l-diversity computation. - - Attributes: - sensitive_value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): - Histogram of l-diversity equivalence class - sensitive value frequencies. - """ - - class LDiversityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value. 
- - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Quasi-identifier values defining the - k-anonymity equivalence class. The order is - always the same as the original request. - equivalence_class_size (int): - Size of the k-anonymity equivalence class. - num_distinct_sensitive_values (int): - Number of distinct sensitive values in this - equivalence class. - top_sensitive_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): - Estimated frequencies of top sensitive - values. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size: int = proto.Field( - proto.INT64, - number=2, - ) - num_distinct_sensitive_values: int = proto.Field( - proto.INT64, - number=3, - ) - top_sensitive_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - - class LDiversityHistogramBucket(proto.Message): - r"""Histogram of l-diversity equivalence class sensitive value - frequencies. - - Attributes: - sensitive_value_frequency_lower_bound (int): - Lower bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - sensitive_value_frequency_upper_bound (int): - Upper bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. 
- """ - - sensitive_value_frequency_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - sensitive_value_frequency_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - sensitive_value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', - ) - - class KMapEstimationResult(proto.Message): - r"""Result of the reidentifiability analysis. Note that these - results are an estimation, not exact values. - - Attributes: - k_map_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): - The intervals [min_anonymity, max_anonymity] do not overlap. - If a value doesn't correspond to any such interval, the - associated frequency is zero. For example, the following - records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} - {min_anonymity: 2, max_anonymity: 3, frequency: 42} - {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean - that there are no record with an estimated anonymity of 4, - 5, or larger than 10. - """ - - class KMapEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. - estimated_anonymity (int): - The estimated anonymity for these - quasi-identifier values. 
- """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_anonymity: int = proto.Field( - proto.INT64, - number=2, - ) - - class KMapEstimationHistogramBucket(proto.Message): - r"""A KMapEstimationHistogramBucket message with the following values: - min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are - 42 records whose quasi-identifier values correspond to 3, 4 or 5 - people in the overlying population. An important particular case is - when min_anonymity = max_anonymity = 1: the frequency field then - corresponds to the number of uniquely identifiable records. - - Attributes: - min_anonymity (int): - Always positive. - max_anonymity (int): - Always greater than or equal to min_anonymity. - bucket_size (int): - Number of records within these anonymity - bounds. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_anonymity: int = proto.Field( - proto.INT64, - number=1, - ) - max_anonymity: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=5, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=7, - ) - - k_map_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket', - ) - - class DeltaPresenceEstimationResult(proto.Message): - r"""Result of the δ-presence computation. Note that these results - are an estimation, not exact values. - - Attributes: - delta_presence_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): - The intervals [min_probability, max_probability) do not - overlap. If a value doesn't correspond to any such interval, - the associated frequency is zero. For example, the following - records: {min_probability: 0, max_probability: 0.1, - frequency: 17} {min_probability: 0.2, max_probability: 0.3, - frequency: 42} {min_probability: 0.3, max_probability: 0.4, - frequency: 99} mean that there are no record with an - estimated probability in [0.1, 0.2) nor larger or equal to - 0.4. - """ - - class DeltaPresenceEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. 
- estimated_probability (float): - The estimated probability that a given individual sharing - these quasi-identifier values is in the dataset. This value, - typically called δ, is the ratio between the number of - records in the dataset with these quasi-identifier values, - and the total number of individuals (inside *and* outside - the dataset) with these quasi-identifier values. For - example, if there are 15 individuals in the dataset who - share the same quasi-identifier values, and an estimated 100 - people in the entire population with these values, then δ is - 0.15. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_probability: float = proto.Field( - proto.DOUBLE, - number=2, - ) - - class DeltaPresenceEstimationHistogramBucket(proto.Message): - r"""A DeltaPresenceEstimationHistogramBucket message with the following - values: min_probability: 0.1 max_probability: 0.2 frequency: 42 - means that there are 42 records for which δ is in [0.1, 0.2). An - important particular case is when min_probability = max_probability - = 1: then, every individual who shares this quasi-identifier - combination is in the dataset. - - Attributes: - min_probability (float): - Between 0 and 1. - max_probability (float): - Always greater than or equal to min_probability. - bucket_size (int): - Number of records within these probability - bounds. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_probability: float = proto.Field( - proto.DOUBLE, - number=1, - ) - max_probability: float = proto.Field( - proto.DOUBLE, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=5, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=7, - ) - - delta_presence_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', - ) - - class RequestedRiskAnalysisOptions(proto.Message): - r"""Risk analysis options. - - Attributes: - job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - The job config for the risk job. 
- """ - - job_config: 'RiskAnalysisJobConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='RiskAnalysisJobConfig', - ) - - requested_privacy_metric: 'PrivacyMetric' = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - requested_source_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - numerical_stats_result: NumericalStatsResult = proto.Field( - proto.MESSAGE, - number=3, - oneof='result', - message=NumericalStatsResult, - ) - categorical_stats_result: CategoricalStatsResult = proto.Field( - proto.MESSAGE, - number=4, - oneof='result', - message=CategoricalStatsResult, - ) - k_anonymity_result: KAnonymityResult = proto.Field( - proto.MESSAGE, - number=5, - oneof='result', - message=KAnonymityResult, - ) - l_diversity_result: LDiversityResult = proto.Field( - proto.MESSAGE, - number=6, - oneof='result', - message=LDiversityResult, - ) - k_map_estimation_result: KMapEstimationResult = proto.Field( - proto.MESSAGE, - number=7, - oneof='result', - message=KMapEstimationResult, - ) - delta_presence_estimation_result: DeltaPresenceEstimationResult = proto.Field( - proto.MESSAGE, - number=9, - oneof='result', - message=DeltaPresenceEstimationResult, - ) - requested_options: RequestedRiskAnalysisOptions = proto.Field( - proto.MESSAGE, - number=10, - message=RequestedRiskAnalysisOptions, - ) - - -class ValueFrequency(proto.Message): - r"""A value of a field, including its frequency. - - Attributes: - value (google.cloud.dlp_v2.types.Value): - A value contained in the field in question. - count (int): - How many times the value is contained in the - field. - """ - - value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - -class Value(proto.Message): - r"""Set of primitive values supported by the system. 
Note that for the - purposes of inspection or transformation, the number of bytes - considered to comprise a 'Value' is based on its representation as a - UTF-8 encoded string. For example, if 'integer_value' is set to - 123456789, the number of bytes would be counted as 9, even though an - int64 only holds up to 8 bytes of data. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - integer_value (int): - integer - - This field is a member of `oneof`_ ``type``. - float_value (float): - float - - This field is a member of `oneof`_ ``type``. - string_value (str): - string - - This field is a member of `oneof`_ ``type``. - boolean_value (bool): - boolean - - This field is a member of `oneof`_ ``type``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - timestamp - - This field is a member of `oneof`_ ``type``. - time_value (google.type.timeofday_pb2.TimeOfDay): - time of day - - This field is a member of `oneof`_ ``type``. - date_value (google.type.date_pb2.Date): - date - - This field is a member of `oneof`_ ``type``. - day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): - day of week - - This field is a member of `oneof`_ ``type``. 
- """ - - integer_value: int = proto.Field( - proto.INT64, - number=1, - oneof='type', - ) - float_value: float = proto.Field( - proto.DOUBLE, - number=2, - oneof='type', - ) - string_value: str = proto.Field( - proto.STRING, - number=3, - oneof='type', - ) - boolean_value: bool = proto.Field( - proto.BOOL, - number=4, - oneof='type', - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=timestamp_pb2.Timestamp, - ) - time_value: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=timeofday_pb2.TimeOfDay, - ) - date_value: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=7, - oneof='type', - message=date_pb2.Date, - ) - day_of_week_value: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=8, - oneof='type', - enum=dayofweek_pb2.DayOfWeek, - ) - - -class QuoteInfo(proto.Message): - r"""Message for infoType-dependent details parsed from quote. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - date_time (google.cloud.dlp_v2.types.DateTime): - The date time indicated by the quote. - - This field is a member of `oneof`_ ``parsed_quote``. - """ - - date_time: 'DateTime' = proto.Field( - proto.MESSAGE, - number=2, - oneof='parsed_quote', - message='DateTime', - ) - - -class DateTime(proto.Message): - r"""Message for a date time object. - e.g. 2018-01-01, 5th August. - - Attributes: - date (google.type.date_pb2.Date): - One or more of the following must be set. - Must be a valid date or time value. - day_of_week (google.type.dayofweek_pb2.DayOfWeek): - Day of week - time (google.type.timeofday_pb2.TimeOfDay): - Time of day - time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): - Time zone - """ - - class TimeZone(proto.Message): - r"""Time zone of the date time object. - - Attributes: - offset_minutes (int): - Set only if the offset can be determined. 
- Positive for time ahead of UTC. E.g. For - "UTC-9", this value is -540. - """ - - offset_minutes: int = proto.Field( - proto.INT32, - number=1, - ) - - date: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=1, - message=date_pb2.Date, - ) - day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=2, - enum=dayofweek_pb2.DayOfWeek, - ) - time: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=3, - message=timeofday_pb2.TimeOfDay, - ) - time_zone: TimeZone = proto.Field( - proto.MESSAGE, - number=4, - message=TimeZone, - ) - - -class DeidentifyConfig(proto.Message): - r"""The configuration that controls how the data will change. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the dataset as free-form text and apply - the same free text transformation everywhere. - - This field is a member of `oneof`_ ``transformation``. - record_transformations (google.cloud.dlp_v2.types.RecordTransformations): - Treat the dataset as structured. - Transformations can be applied to specific - locations within structured datasets, such as - transforming a column within a table. - - This field is a member of `oneof`_ ``transformation``. - image_transformations (google.cloud.dlp_v2.types.ImageTransformations): - Treat the dataset as an image and redact. - - This field is a member of `oneof`_ ``transformation``. - transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): - Mode for handling transformation errors. If left - unspecified, the default mode is - ``TransformationErrorHandling.ThrowError``. 
- """ - - info_type_transformations: 'InfoTypeTransformations' = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='InfoTypeTransformations', - ) - record_transformations: 'RecordTransformations' = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RecordTransformations', - ) - image_transformations: 'ImageTransformations' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='ImageTransformations', - ) - transformation_error_handling: 'TransformationErrorHandling' = proto.Field( - proto.MESSAGE, - number=3, - message='TransformationErrorHandling', - ) - - -class ImageTransformations(proto.Message): - r"""A type of transformation that is applied over images. - - Attributes: - transforms (MutableSequence[google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation]): - - """ - - class ImageTransformation(proto.Message): - r"""Configuration for determining how redaction of images should - occur. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - selected_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.SelectedInfoTypes): - Apply transformation to the selected info_types. - - This field is a member of `oneof`_ ``target``. - all_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllInfoTypes): - Apply transformation to all findings not specified in other - ImageTransformation's selected_info_types. Only one instance - is allowed within the ImageTransformations message. - - This field is a member of `oneof`_ ``target``. 
- all_text (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllText): - Apply transformation to all text that doesn't - match an infoType. Only one instance is allowed - within the ImageTransformations message. - - This field is a member of `oneof`_ ``target``. - redaction_color (google.cloud.dlp_v2.types.Color): - The color to use when redacting content from - an image. If not specified, the default is - black. - """ - - class SelectedInfoTypes(proto.Message): - r"""Apply transformation to the selected info_types. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - Required. InfoTypes to apply the - transformation to. Required. Provided InfoType - must be unique within the ImageTransformations - message. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=storage.InfoType, - ) - - class AllInfoTypes(proto.Message): - r"""Apply transformation to all findings. - """ - - class AllText(proto.Message): - r"""Apply to all text. 
- """ - - selected_info_types: 'ImageTransformations.ImageTransformation.SelectedInfoTypes' = proto.Field( - proto.MESSAGE, - number=4, - oneof='target', - message='ImageTransformations.ImageTransformation.SelectedInfoTypes', - ) - all_info_types: 'ImageTransformations.ImageTransformation.AllInfoTypes' = proto.Field( - proto.MESSAGE, - number=5, - oneof='target', - message='ImageTransformations.ImageTransformation.AllInfoTypes', - ) - all_text: 'ImageTransformations.ImageTransformation.AllText' = proto.Field( - proto.MESSAGE, - number=6, - oneof='target', - message='ImageTransformations.ImageTransformation.AllText', - ) - redaction_color: 'Color' = proto.Field( - proto.MESSAGE, - number=3, - message='Color', - ) - - transforms: MutableSequence[ImageTransformation] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=ImageTransformation, - ) - - -class TransformationErrorHandling(proto.Message): - r"""How to handle transformation errors during de-identification. A - transformation error occurs when the requested transformation is - incompatible with the data. For example, trying to de-identify an IP - address using a ``DateShift`` transformation would result in a - transformation error, since date info cannot be extracted from an IP - address. Information about any incompatible transformations, and how - they were handled, is returned in the response as part of the - ``TransformationOverviews``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): - Throw an error - - This field is a member of `oneof`_ ``mode``. 
- leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed): - Ignore errors - - This field is a member of `oneof`_ ``mode``. - """ - - class ThrowError(proto.Message): - r"""Throw an error and fail the request when a transformation - error occurs. - - """ - - class LeaveUntransformed(proto.Message): - r"""Skips the data without modifying it if the requested transformation - would cause an error. For example, if a ``DateShift`` transformation - were applied an an IP address, this mode would leave the IP address - unchanged in the response. - - """ - - throw_error: ThrowError = proto.Field( - proto.MESSAGE, - number=1, - oneof='mode', - message=ThrowError, - ) - leave_untransformed: LeaveUntransformed = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=LeaveUntransformed, - ) - - -class PrimitiveTransformation(proto.Message): - r"""A rule for transforming a value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig): - Replace with a specified value. - - This field is a member of `oneof`_ ``transformation``. - redact_config (google.cloud.dlp_v2.types.RedactConfig): - Redact - - This field is a member of `oneof`_ ``transformation``. - character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig): - Mask - - This field is a member of `oneof`_ ``transformation``. - crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig): - Ffx-Fpe - - This field is a member of `oneof`_ ``transformation``. 
- fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): - Fixed size bucketing - - This field is a member of `oneof`_ ``transformation``. - bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): - Bucketing - - This field is a member of `oneof`_ ``transformation``. - replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): - Replace with infotype - - This field is a member of `oneof`_ ``transformation``. - time_part_config (google.cloud.dlp_v2.types.TimePartConfig): - Time extraction - - This field is a member of `oneof`_ ``transformation``. - crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): - Crypto - - This field is a member of `oneof`_ ``transformation``. - date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): - Date Shift - - This field is a member of `oneof`_ ``transformation``. - crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): - Deterministic Crypto - - This field is a member of `oneof`_ ``transformation``. - replace_dictionary_config (google.cloud.dlp_v2.types.ReplaceDictionaryConfig): - Replace with a value randomly drawn (with - replacement) from a dictionary. - - This field is a member of `oneof`_ ``transformation``. 
- """ - - replace_config: 'ReplaceValueConfig' = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='ReplaceValueConfig', - ) - redact_config: 'RedactConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RedactConfig', - ) - character_mask_config: 'CharacterMaskConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='transformation', - message='CharacterMaskConfig', - ) - crypto_replace_ffx_fpe_config: 'CryptoReplaceFfxFpeConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='CryptoReplaceFfxFpeConfig', - ) - fixed_size_bucketing_config: 'FixedSizeBucketingConfig' = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='FixedSizeBucketingConfig', - ) - bucketing_config: 'BucketingConfig' = proto.Field( - proto.MESSAGE, - number=6, - oneof='transformation', - message='BucketingConfig', - ) - replace_with_info_type_config: 'ReplaceWithInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=7, - oneof='transformation', - message='ReplaceWithInfoTypeConfig', - ) - time_part_config: 'TimePartConfig' = proto.Field( - proto.MESSAGE, - number=8, - oneof='transformation', - message='TimePartConfig', - ) - crypto_hash_config: 'CryptoHashConfig' = proto.Field( - proto.MESSAGE, - number=9, - oneof='transformation', - message='CryptoHashConfig', - ) - date_shift_config: 'DateShiftConfig' = proto.Field( - proto.MESSAGE, - number=11, - oneof='transformation', - message='DateShiftConfig', - ) - crypto_deterministic_config: 'CryptoDeterministicConfig' = proto.Field( - proto.MESSAGE, - number=12, - oneof='transformation', - message='CryptoDeterministicConfig', - ) - replace_dictionary_config: 'ReplaceDictionaryConfig' = proto.Field( - proto.MESSAGE, - number=13, - oneof='transformation', - message='ReplaceDictionaryConfig', - ) - - -class TimePartConfig(proto.Message): - r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or - preserve a 
portion of the value. - - Attributes: - part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): - The part of the time to keep. - """ - class TimePart(proto.Enum): - r"""Components that make up time. - - Values: - TIME_PART_UNSPECIFIED (0): - Unused - YEAR (1): - [0-9999] - MONTH (2): - [1-12] - DAY_OF_MONTH (3): - [1-31] - DAY_OF_WEEK (4): - [1-7] - WEEK_OF_YEAR (5): - [1-53] - HOUR_OF_DAY (6): - [0-23] - """ - TIME_PART_UNSPECIFIED = 0 - YEAR = 1 - MONTH = 2 - DAY_OF_MONTH = 3 - DAY_OF_WEEK = 4 - WEEK_OF_YEAR = 5 - HOUR_OF_DAY = 6 - - part_to_extract: TimePart = proto.Field( - proto.ENUM, - number=1, - enum=TimePart, - ) - - -class CryptoHashConfig(proto.Message): - r"""Pseudonymization method that generates surrogates via - cryptographic hashing. Uses SHA-256. - The key size must be either 32 or 64 bytes. - Outputs a base64 encoded representation of the hashed output - (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). - Currently, only string and integer values can be hashed. See - https://cloud.google.com/dlp/docs/pseudonymization to learn - more. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the hash function. - """ - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - - -class CryptoDeterministicConfig(proto.Message): - r"""Pseudonymization method that generates deterministic - encryption for the given input. Outputs a base64 encoded - representation of the encrypted output. Uses AES-SIV based on - the RFC https://tools.ietf.org/html/rfc5297. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the encryption function. For - deterministic encryption using AES-SIV, the - provided key is internally expanded to 64 bytes - prior to use. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom info type to annotate the surrogate with. 
This - annotation will be applied to the surrogate by prefixing it - with the name of the custom info type followed by the number - of characters comprising the surrogate. The following scheme - defines the format: {info type name}({surrogate character - count}):{surrogate} - - For example, if the name of custom info type is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom info type 'Surrogate'. This - facilitates reversal of the surrogate when it occurs in free - text. - - Note: For record transformations where the entire cell in a - table is being transformed, surrogates are not mandatory. - Surrogates are used to denote the location of the token and - are necessary for re-identification in free form text. - - In order for inspection to work properly, the name of this - info type must not occur naturally anywhere in your data; - otherwise, inspection may either - - - reverse a surrogate that does not correspond to an actual - identifier - - be unable to parse the surrogate and result in an error - - Therefore, choose your custom info type name carefully after - considering what your data looks like. One way to select a - name that has a high chance of yielding reliable detection - is to include one or more unicode characters that are highly - improbable to exist in your data. For example, assuming your - data is entered from a regular ASCII keyboard, the symbol - with the hex code point 29DD might be used like so: - ⧝MY_TOKEN_TYPE. - context (google.cloud.dlp_v2.types.FieldId): - A context may be used for higher security and maintaining - referential integrity such that the same identifier in two - different contexts will be given a distinct surrogate. The - context is appended to plaintext value being encrypted. On - decryption the provided context is validated against the - value used during encryption. 
If a context was provided - during encryption, same context must be provided during - decryption as well. - - If the context is not set, plaintext would be used as is for - encryption. If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - plaintext would be used as is for encryption. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - unstructured ``ContentItem``\ s. - """ - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - surrogate_info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - - -class ReplaceValueConfig(proto.Message): - r"""Replace each input value with a given ``Value``. - - Attributes: - new_value (google.cloud.dlp_v2.types.Value): - Value to replace it with. - """ - - new_value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - - -class ReplaceDictionaryConfig(proto.Message): - r"""Replace each input value with a value randomly selected from - the dictionary. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): - A list of words to select from for random replacement. The - `limits `__ page - contains details about the size limits of dictionaries. - - This field is a member of `oneof`_ ``type``. - """ - - word_list: storage.CustomInfoType.Dictionary.WordList = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.Dictionary.WordList, - ) - - -class ReplaceWithInfoTypeConfig(proto.Message): - r"""Replace each matching finding with the name of the info_type. 
- """ - - -class RedactConfig(proto.Message): - r"""Redact a given value. For example, if used with an - ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My - phone number is 206-555-0123', the output would be 'My phone number - is '. - - """ - - -class CharsToIgnore(proto.Message): - r"""Characters to skip when doing deidentification of a value. - These will be left alone and skipped. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - characters_to_skip (str): - Characters to not transform when masking. - - This field is a member of `oneof`_ ``characters``. - common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): - Common characters to not transform when - masking. Useful to avoid removing punctuation. - - This field is a member of `oneof`_ ``characters``. - """ - class CommonCharsToIgnore(proto.Enum): - r"""Convenience enum for indicating common characters to not - transform. - - Values: - COMMON_CHARS_TO_IGNORE_UNSPECIFIED (0): - Unused. 
- NUMERIC (1): - 0-9 - ALPHA_UPPER_CASE (2): - A-Z - ALPHA_LOWER_CASE (3): - a-z - PUNCTUATION (4): - US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ - WHITESPACE (5): - Whitespace character, one of [ \\t\n\x0B\f\r] - """ - COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 - NUMERIC = 1 - ALPHA_UPPER_CASE = 2 - ALPHA_LOWER_CASE = 3 - PUNCTUATION = 4 - WHITESPACE = 5 - - characters_to_skip: str = proto.Field( - proto.STRING, - number=1, - oneof='characters', - ) - common_characters_to_ignore: CommonCharsToIgnore = proto.Field( - proto.ENUM, - number=2, - oneof='characters', - enum=CommonCharsToIgnore, - ) - - -class CharacterMaskConfig(proto.Message): - r"""Partially mask a string by replacing a given number of characters - with a fixed character. Masking can start from the beginning or end - of the string. This can be used on data of any type (numbers, longs, - and so on) and when de-identifying structured data we'll attempt to - preserve the original data's type. (This allows you to take a long - like 123 and modify it to a string like \**3. - - Attributes: - masking_character (str): - Character to use to mask the sensitive values—for example, - ``*`` for an alphabetic string such as a name, or ``0`` for - a numeric string such as ZIP code or credit card number. - This string must have a length of 1. If not supplied, this - value defaults to ``*`` for strings, and ``0`` for digits. - number_to_mask (int): - Number of characters to mask. If not set, all matching chars - will be masked. Skipped characters do not count towards this - tally. - - If ``number_to_mask`` is negative, this denotes inverse - masking. Cloud DLP masks all but a number of characters. For - example, suppose you have the following values: - - - ``masking_character`` is ``*`` - - ``number_to_mask`` is ``-4`` - - ``reverse_order`` is ``false`` - - ``CharsToIgnore`` includes ``-`` - - Input string is ``1234-5678-9012-3456`` - - The resulting de-identified string is - ``****-****-****-3456``. 
Cloud DLP masks all but the last - four characters. If ``reverse_order`` is ``true``, all but - the first four characters are masked as - ``1234-****-****-****``. - reverse_order (bool): - Mask characters in reverse order. For example, if - ``masking_character`` is ``0``, ``number_to_mask`` is - ``14``, and ``reverse_order`` is ``false``, then the input - string ``1234-5678-9012-3456`` is masked as - ``00000000000000-3456``. If ``masking_character`` is ``*``, - ``number_to_mask`` is ``3``, and ``reverse_order`` is - ``true``, then the string ``12345`` is masked as ``12***``. - characters_to_ignore (MutableSequence[google.cloud.dlp_v2.types.CharsToIgnore]): - When masking a string, items in this list will be skipped - when replacing characters. For example, if the input string - is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` - and mask 5 characters with ``*``, Cloud DLP returns - ``***-**5-5555``. - """ - - masking_character: str = proto.Field( - proto.STRING, - number=1, - ) - number_to_mask: int = proto.Field( - proto.INT32, - number=2, - ) - reverse_order: bool = proto.Field( - proto.BOOL, - number=3, - ) - characters_to_ignore: MutableSequence['CharsToIgnore'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='CharsToIgnore', - ) - - -class FixedSizeBucketingConfig(proto.Message): - r"""Buckets values based on fixed size ranges. The Bucketing - transformation can provide all of this functionality, but requires - more configuration. This message is provided as a convenience to the - user for simple bucketing strategies. - - The transformed value will be a hyphenated string of - {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and - upper_bound = 20, all values that are within this bucket will be - replaced with "10-20". - - This can be used on data of type: double, long. 
- - If the bound Value type differs from the type of data being - transformed, we will first attempt converting the type of the data - to be transformed to match the type of the bound before comparing. - - See https://cloud.google.com/dlp/docs/concepts-bucketing to learn - more. - - Attributes: - lower_bound (google.cloud.dlp_v2.types.Value): - Required. Lower bound value of buckets. All values less than - ``lower_bound`` are grouped together into a single bucket; - for example if ``lower_bound`` = 10, then all values less - than 10 are replaced with the value "-10". - upper_bound (google.cloud.dlp_v2.types.Value): - Required. Upper bound value of buckets. All values greater - than upper_bound are grouped together into a single bucket; - for example if ``upper_bound`` = 89, then all values greater - than 89 are replaced with the value "89+". - bucket_size (float): - Required. Size of each bucket (except for minimum and - maximum buckets). So if ``lower_bound`` = 10, - ``upper_bound`` = 89, and ``bucket_size`` = 10, then the - following buckets would be used: -10, 10-20, 20-30, 30-40, - 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 - decimals works. - """ - - lower_bound: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - upper_bound: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - bucket_size: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - -class BucketingConfig(proto.Message): - r"""Generalization function that buckets values based on ranges. The - ranges and replacement values are dynamically provided by the user - for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> - HIGH This can be used on data of type: number, long, string, - timestamp. If the bound ``Value`` type differs from the type of data - being transformed, we will first attempt converting the type of the - data to be transformed to match the type of the bound before - comparing. 
See https://cloud.google.com/dlp/docs/concepts-bucketing - to learn more. - - Attributes: - buckets (MutableSequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): - Set of buckets. Ranges must be - non-overlapping. - """ - - class Bucket(proto.Message): - r"""Bucket is represented as a range, along with replacement - values. - - Attributes: - min_ (google.cloud.dlp_v2.types.Value): - Lower bound of the range, inclusive. Type - should be the same as max if used. - max_ (google.cloud.dlp_v2.types.Value): - Upper bound of the range, exclusive; type - must match min. - replacement_value (google.cloud.dlp_v2.types.Value): - Required. Replacement value for this bucket. - """ - - min_: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - replacement_value: 'Value' = proto.Field( - proto.MESSAGE, - number=3, - message='Value', - ) - - buckets: MutableSequence[Bucket] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Bucket, - ) - - -class CryptoReplaceFfxFpeConfig(proto.Message): - r"""Replaces an identifier with a surrogate using Format Preserving - Encryption (FPE) with the FFX mode of operation; however when used - in the ``ReidentifyContent`` API method, it serves the opposite - function by reversing the surrogate back into the original - identifier. The identifier must be encoded as ASCII. For a given - crypto key and context, the same identifier will be replaced with - the same surrogate. Identifiers must be at least two characters - long. In the case that the identifier is the empty string, it will - be skipped. See https://cloud.google.com/dlp/docs/pseudonymization - to learn more. - - Note: We recommend using CryptoDeterministicConfig for all use cases - which do not require preserving the input alphabet space and size, - plus warrant referential integrity. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Required. The key used by the encryption - algorithm. - context (google.cloud.dlp_v2.types.FieldId): - The 'tweak', a context may be used for higher security since - the same identifier in two different contexts won't be given - the same surrogate. If the context is not set, a default - tweak will be used. - - If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - a default tweak will be used. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - unstructured ``ContentItem``\ s. Currently, the referenced - field may be of value type integer or string. - - The tweak is constructed as a sequence of bytes in big - endian byte order such that: - - - a 64 bit integer is encoded followed by a single byte of - value 1 - - a string is encoded in UTF-8 format followed by a single - byte of value 2 - common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): - Common alphabets. - - This field is a member of `oneof`_ ``alphabet``. - custom_alphabet (str): - This is supported by mapping these to the alphanumeric - characters that the FFX mode natively supports. This happens - before/after encryption/decryption. Each character listed - must appear only once. Number of characters must be in the - range [2, 95]. This must be encoded as ASCII. The order of - characters does not matter. 
The full list of allowed - characters is: - 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz - ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ - - This field is a member of `oneof`_ ``alphabet``. - radix (int): - The native way to select the alphabet. Must be in the range - [2, 95]. - - This field is a member of `oneof`_ ``alphabet``. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom infoType to annotate the surrogate with. This - annotation will be applied to the surrogate by prefixing it - with the name of the custom infoType followed by the number - of characters comprising the surrogate. The following scheme - defines the format: - info_type_name(surrogate_character_count):surrogate - - For example, if the name of custom infoType is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom infoType - ```SurrogateType`` `__. - This facilitates reversal of the surrogate when it occurs in - free text. - - In order for inspection to work properly, the name of this - infoType must not occur naturally anywhere in your data; - otherwise, inspection may find a surrogate that does not - correspond to an actual identifier. Therefore, choose your - custom infoType name carefully after considering what your - data looks like. One way to select a name that has a high - chance of yielding reliable detection is to include one or - more unicode characters that are highly improbable to exist - in your data. For example, assuming your data is entered - from a regular ASCII keyboard, the symbol with the hex code - point 29DD might be used like so: ⧝MY_TOKEN_TYPE - """ - class FfxCommonNativeAlphabet(proto.Enum): - r"""These are commonly used subsets of the alphabet that the FFX - mode natively supports. In the algorithm, the alphabet is - selected using the "radix". 
Therefore each corresponds to a - particular radix. - - Values: - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (0): - Unused. - NUMERIC (1): - ``[0-9]`` (radix of 10) - HEXADECIMAL (2): - ``[0-9A-F]`` (radix of 16) - UPPER_CASE_ALPHA_NUMERIC (3): - ``[0-9A-Z]`` (radix of 36) - ALPHA_NUMERIC (4): - ``[0-9A-Za-z]`` (radix of 62) - """ - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 - NUMERIC = 1 - HEXADECIMAL = 2 - UPPER_CASE_ALPHA_NUMERIC = 3 - ALPHA_NUMERIC = 4 - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - common_alphabet: FfxCommonNativeAlphabet = proto.Field( - proto.ENUM, - number=4, - oneof='alphabet', - enum=FfxCommonNativeAlphabet, - ) - custom_alphabet: str = proto.Field( - proto.STRING, - number=5, - oneof='alphabet', - ) - radix: int = proto.Field( - proto.INT32, - number=6, - oneof='alphabet', - ) - surrogate_info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=8, - message=storage.InfoType, - ) - - -class CryptoKey(proto.Message): - r"""This is a data encryption key (DEK) (as opposed to - a key encryption key (KEK) stored by Cloud Key Management - Service (Cloud KMS). - When using Cloud KMS to wrap or unwrap a DEK, be sure to set an - appropriate IAM policy on the KEK to ensure an attacker cannot - unwrap the DEK. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transient (google.cloud.dlp_v2.types.TransientCryptoKey): - Transient crypto key - - This field is a member of `oneof`_ ``source``. 
- unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): - Unwrapped crypto key - - This field is a member of `oneof`_ ``source``. - kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): - Key wrapped using Cloud KMS - - This field is a member of `oneof`_ ``source``. - """ - - transient: 'TransientCryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='TransientCryptoKey', - ) - unwrapped: 'UnwrappedCryptoKey' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='UnwrappedCryptoKey', - ) - kms_wrapped: 'KmsWrappedCryptoKey' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='KmsWrappedCryptoKey', - ) - - -class TransientCryptoKey(proto.Message): - r"""Use this to have a random data crypto key generated. - It will be discarded after the request finishes. - - Attributes: - name (str): - Required. Name of the key. This is an arbitrary string used - to differentiate different keys. A unique key is generated - per name: two separate ``TransientCryptoKey`` protos share - the same generated key if their names are the same. When the - data crypto key is generated, this name is not used in any - way (repeating the api call will result in a different key - being generated). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UnwrappedCryptoKey(proto.Message): - r"""Using raw keys is prone to security risks due to accidentally - leaking the key. Choose another type of key if possible. - - Attributes: - key (bytes): - Required. A 128/192/256 bit key. - """ - - key: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class KmsWrappedCryptoKey(proto.Message): - r"""Include to use an existing data crypto key wrapped by KMS. The - wrapped key must be a 128-, 192-, or 256-bit key. 
Authorization - requires the following IAM permissions when sending a request to - perform a crypto transformation using a KMS-wrapped crypto key: - dlp.kms.encrypt - - For more information, see [Creating a wrapped key] - (https://cloud.google.com/dlp/docs/create-wrapped-key). - - Note: When you use Cloud KMS for cryptographic operations, `charges - apply `__. - - Attributes: - wrapped_key (bytes): - Required. The wrapped data crypto key. - crypto_key_name (str): - Required. The resource name of the KMS - CryptoKey to use for unwrapping. - """ - - wrapped_key: bytes = proto.Field( - proto.BYTES, - number=1, - ) - crypto_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DateShiftConfig(proto.Message): - r"""Shifts dates by random number of days, with option to be - consistent for the same context. See - https://cloud.google.com/dlp/docs/concepts-date-shifting to - learn more. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - upper_bound_days (int): - Required. Range of shift in days. Actual - shift will be selected at random within this - range (inclusive ends). Negative means shift to - earlier in time. Must not be more than 365250 - days (1000 years) each direction. - For example, 3 means shift date to at most 3 - days into the future. - lower_bound_days (int): - Required. For example, -5 means shift date to - at most 5 days back in the past. - context (google.cloud.dlp_v2.types.FieldId): - Points to the field that contains the - context, for example, an entity id. If set, must - also set cryptoKey. If set, shift will be - consistent for the given context. - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Causes the shift to be computed based on this key and the - context. This results in the same shift for the same context - and crypto_key. If set, must also set context. Can only be - applied to table items. - - This field is a member of `oneof`_ ``method``. 
- """ - - upper_bound_days: int = proto.Field( - proto.INT32, - number=1, - ) - lower_bound_days: int = proto.Field( - proto.INT32, - number=2, - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=4, - oneof='method', - message='CryptoKey', - ) - - -class InfoTypeTransformations(proto.Message): - r"""A type of transformation that will scan unstructured text and apply - various ``PrimitiveTransformation``\ s to each finding, where the - transformation is applied to only values that were identified as a - specific info_type. - - Attributes: - transformations (MutableSequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): - Required. Transformation for each infoType. - Cannot specify more than one for a given - infoType. - """ - - class InfoTypeTransformation(proto.Message): - r"""A transformation to apply to text that is identified as a specific - info_type. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - InfoTypes to apply the transformation to. An empty list will - cause this transformation to apply to all findings that - correspond to infoTypes that were requested in - ``InspectConfig``. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Required. Primitive transformation to apply - to the infoType. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - primitive_transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=2, - message='PrimitiveTransformation', - ) - - transformations: MutableSequence[InfoTypeTransformation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=InfoTypeTransformation, - ) - - -class FieldTransformation(proto.Message): - r"""The transformation to apply to the field. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Required. Input field(s) to apply the transformation to. - When you have columns that reference their position within a - list, omit the index from the FieldId. FieldId name matching - ignores the index. For example, instead of - "contact.nums[0].type", use "contact.nums.type". - condition (google.cloud.dlp_v2.types.RecordCondition): - Only apply the transformation if the condition evaluates to - true for the given ``RecordCondition``. The conditions are - allowed to reference fields that are not used in the actual - transformation. - - Example Use Cases: - - - Apply a different bucket transformation to an age column - if the zip code column for the same record is within a - specific range. - - Redact a field if the date of birth field is greater than - 85. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Apply the transformation to the entire field. - - This field is a member of `oneof`_ ``transformation``. - info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the contents of the field as free text, and - selectively transform content that matches an ``InfoType``. - - This field is a member of `oneof`_ ``transformation``. 
- """ - - fields: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - condition: 'RecordCondition' = proto.Field( - proto.MESSAGE, - number=3, - message='RecordCondition', - ) - primitive_transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='PrimitiveTransformation', - ) - info_type_transformations: 'InfoTypeTransformations' = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='InfoTypeTransformations', - ) - - -class RecordTransformations(proto.Message): - r"""A type of transformation that is applied over structured data - such as a table. - - Attributes: - field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): - Transform the record by applying various - field transformations. - record_suppressions (MutableSequence[google.cloud.dlp_v2.types.RecordSuppression]): - Configuration defining which records get - suppressed entirely. Records that match any - suppression rule are omitted from the output. - """ - - field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldTransformation', - ) - record_suppressions: MutableSequence['RecordSuppression'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='RecordSuppression', - ) - - -class RecordSuppression(proto.Message): - r"""Configuration to suppress records whose suppression - conditions evaluate to true. - - Attributes: - condition (google.cloud.dlp_v2.types.RecordCondition): - A condition that when it evaluates to true - will result in the record being evaluated to be - suppressed from the transformed content. 
- """ - - condition: 'RecordCondition' = proto.Field( - proto.MESSAGE, - number=1, - message='RecordCondition', - ) - - -class RecordCondition(proto.Message): - r"""A condition for determining whether a transformation should - be applied to a field. - - Attributes: - expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): - An expression. - """ - - class Condition(proto.Message): - r"""The field type of ``value`` and ``field`` do not need to match to be - considered equal, but not all comparisons are possible. EQUAL_TO and - NOT_EQUAL_TO attempt to compare even with incompatible types, but - all other comparisons are invalid with incompatible types. A - ``value`` of type: - - - ``string`` can be compared against all other types - - ``boolean`` can only be compared against other booleans - - ``integer`` can be compared against doubles or a string if the - string value can be parsed as an integer. - - ``double`` can be compared against integers or a string if the - string can be parsed as a double. - - ``Timestamp`` can be compared against strings in RFC 3339 date - string format. - - ``TimeOfDay`` can be compared against timestamps and strings in - the format of 'HH:mm:ss'. - - If we fail to compare do to type mismatch, a warning will be given - and the condition will evaluate to false. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Field within the record this - condition is evaluated against. - operator (google.cloud.dlp_v2.types.RelationalOperator): - Required. Operator used to compare the field - or infoType to the value. - value (google.cloud.dlp_v2.types.Value): - Value to compare against. [Mandatory, except for ``EXISTS`` - tests.] 
- """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - operator: 'RelationalOperator' = proto.Field( - proto.ENUM, - number=3, - enum='RelationalOperator', - ) - value: 'Value' = proto.Field( - proto.MESSAGE, - number=4, - message='Value', - ) - - class Conditions(proto.Message): - r"""A collection of conditions. - - Attributes: - conditions (MutableSequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): - A collection of conditions. - """ - - conditions: MutableSequence['RecordCondition.Condition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='RecordCondition.Condition', - ) - - class Expressions(proto.Message): - r"""An expression, consisting of an operator and conditions. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): - The operator to apply to the result of conditions. Default - and currently only supported value is ``AND``. - conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): - Conditions to apply to the expression. - - This field is a member of `oneof`_ ``type``. - """ - class LogicalOperator(proto.Enum): - r"""Logical operators for conditional checks. - - Values: - LOGICAL_OPERATOR_UNSPECIFIED (0): - Unused - AND (1): - Conditional AND - """ - LOGICAL_OPERATOR_UNSPECIFIED = 0 - AND = 1 - - logical_operator: 'RecordCondition.Expressions.LogicalOperator' = proto.Field( - proto.ENUM, - number=1, - enum='RecordCondition.Expressions.LogicalOperator', - ) - conditions: 'RecordCondition.Conditions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='RecordCondition.Conditions', - ) - - expressions: Expressions = proto.Field( - proto.MESSAGE, - number=3, - message=Expressions, - ) - - -class TransformationOverview(proto.Message): - r"""Overview of the modifications that occurred. 
- - Attributes: - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - transformation_summaries (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary]): - Transformations applied to the dataset. - """ - - transformed_bytes: int = proto.Field( - proto.INT64, - number=2, - ) - transformation_summaries: MutableSequence['TransformationSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TransformationSummary', - ) - - -class TransformationSummary(proto.Message): - r"""Summary of a single transformation. Only one of 'transformation', - 'field_transformation', or 'record_suppress' will be set. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Set if the transformation was limited to a - specific InfoType. - field (google.cloud.dlp_v2.types.FieldId): - Set if the transformation was limited to a - specific FieldId. - transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - The specific transformation these stats apply - to. - field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): - The field transformation that was applied. - If multiple field transformations are requested - for a single field, this list will contain all - of them; otherwise, only one is supplied. - record_suppress (google.cloud.dlp_v2.types.RecordSuppression): - The specific suppression option these stats - apply to. - results (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): - Collection of all transformations that took - place or had an error. - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - """ - class TransformationResultCode(proto.Enum): - r"""Possible outcomes of transformations. - - Values: - TRANSFORMATION_RESULT_CODE_UNSPECIFIED (0): - Unused - SUCCESS (1): - Transformation completed without an error. - ERROR (2): - Transformation had an error. 
- """ - TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 - SUCCESS = 1 - ERROR = 2 - - class SummaryResult(proto.Message): - r"""A collection that informs the user the number of times a particular - ``TransformationResultCode`` and error details occurred. - - Attributes: - count (int): - Number of transformations counted by this - result. - code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): - Outcome of the transformation. - details (str): - A place for warnings or errors to show up if - a transformation didn't work as expected. - """ - - count: int = proto.Field( - proto.INT64, - number=1, - ) - code: 'TransformationSummary.TransformationResultCode' = proto.Field( - proto.ENUM, - number=2, - enum='TransformationSummary.TransformationResultCode', - ) - details: str = proto.Field( - proto.STRING, - number=3, - ) - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=3, - message='PrimitiveTransformation', - ) - field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldTransformation', - ) - record_suppress: 'RecordSuppression' = proto.Field( - proto.MESSAGE, - number=6, - message='RecordSuppression', - ) - results: MutableSequence[SummaryResult] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=SummaryResult, - ) - transformed_bytes: int = proto.Field( - proto.INT64, - number=7, - ) - - -class TransformationDescription(proto.Message): - r"""A flattened description of a ``PrimitiveTransformation`` or - ``RecordSuppression``. - - Attributes: - type_ (google.cloud.dlp_v2.types.TransformationType): - The transformation type. - description (str): - A description of the transformation. 
This is empty for a - RECORD_SUPPRESSION, or is the output of calling toString() - on the ``PrimitiveTransformation`` protocol buffer message - for any other type of transformation. - condition (str): - A human-readable string representation of the - ``RecordCondition`` corresponding to this transformation. - Set if a ``RecordCondition`` was used to determine whether - or not to apply this transformation. - - Examples: \* (age_field > 85) \* (age_field <= 18) \* - (zip_field exists) \* (zip_field == 01234) && (city_field != - "Springville") \* (zip_field == 01234) && (age_field <= 18) - && (city_field exists) - info_type (google.cloud.dlp_v2.types.InfoType): - Set if the transformation was limited to a specific - ``InfoType``. - """ - - type_: 'TransformationType' = proto.Field( - proto.ENUM, - number=1, - enum='TransformationType', - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - condition: str = proto.Field( - proto.STRING, - number=3, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=4, - message=storage.InfoType, - ) - - -class TransformationDetails(proto.Message): - r"""Details about a single transformation. This object contains a - description of the transformation, information about whether the - transformation was successfully applied, and the precise - location where the transformation occurred. These details are - stored in a user-specified BigQuery table. - - Attributes: - resource_name (str): - The name of the job that completed the - transformation. - container_name (str): - The top level name of the container where the - transformation is located (this will be the - source file name or table name). - transformation (MutableSequence[google.cloud.dlp_v2.types.TransformationDescription]): - Description of transformation. This would only contain more - than one element if there were multiple matching - transformations and which one to apply was ambiguous. 
Not - set for states that contain no transformation, currently - only state that contains no transformation is - TransformationResultStateType.METADATA_UNRETRIEVABLE. - status_details (google.cloud.dlp_v2.types.TransformationResultStatus): - Status of the transformation, if - transformation was not successful, this will - specify what caused it to fail, otherwise it - will show that the transformation was - successful. - transformed_bytes (int): - The number of bytes that were transformed. If - transformation was unsuccessful or did not take - place because there was no content to transform, - this will be zero. - transformation_location (google.cloud.dlp_v2.types.TransformationLocation): - The precise location of the transformed - content in the original container. - """ - - resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - container_name: str = proto.Field( - proto.STRING, - number=2, - ) - transformation: MutableSequence['TransformationDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TransformationDescription', - ) - status_details: 'TransformationResultStatus' = proto.Field( - proto.MESSAGE, - number=4, - message='TransformationResultStatus', - ) - transformed_bytes: int = proto.Field( - proto.INT64, - number=5, - ) - transformation_location: 'TransformationLocation' = proto.Field( - proto.MESSAGE, - number=6, - message='TransformationLocation', - ) - - -class TransformationLocation(proto.Message): - r"""Specifies the location of a transformation. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - finding_id (str): - For infotype transformations, link to the - corresponding findings ID so that location - information does not need to be duplicated. Each - findings ID correlates to an entry in the - findings output table, this table only gets - created when users specify to save findings (add - the save findings action to the request). - - This field is a member of `oneof`_ ``location_type``. - record_transformation (google.cloud.dlp_v2.types.RecordTransformation): - For record transformations, provide a field - and container information. - - This field is a member of `oneof`_ ``location_type``. - container_type (google.cloud.dlp_v2.types.TransformationContainerType): - Information about the functionality of the - container where this finding occurred, if - available. - """ - - finding_id: str = proto.Field( - proto.STRING, - number=1, - oneof='location_type', - ) - record_transformation: 'RecordTransformation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location_type', - message='RecordTransformation', - ) - container_type: 'TransformationContainerType' = proto.Field( - proto.ENUM, - number=3, - enum='TransformationContainerType', - ) - - -class RecordTransformation(proto.Message): - r""" - - Attributes: - field_id (google.cloud.dlp_v2.types.FieldId): - For record transformations, provide a field. - container_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Findings container modification timestamp, if - applicable. - container_version (str): - Container version, if available ("generation" - for Cloud Storage). 
- """ - - field_id: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - container_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - container_version: str = proto.Field( - proto.STRING, - number=3, - ) - - -class TransformationResultStatus(proto.Message): - r""" - - Attributes: - result_status_type (google.cloud.dlp_v2.types.TransformationResultStatusType): - Transformation result status type, this will - be either SUCCESS, or it will be the reason for - why the transformation was not completely - successful. - details (google.rpc.status_pb2.Status): - Detailed error codes and messages - """ - - result_status_type: 'TransformationResultStatusType' = proto.Field( - proto.ENUM, - number=1, - enum='TransformationResultStatusType', - ) - details: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -class TransformationDetailsStorageConfig(proto.Message): - r"""Config for storing transformation details. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - The BigQuery table in which to store the output. This may be - an existing table or in a new table in an existing dataset. - If table_id is not set a new one will be generated for you - with the following format: - dlp_googleapis_transformation_details_yyyy_mm_dd_[dlp_job_id]. - Pacific time zone will be used for generating the date - details. - - This field is a member of `oneof`_ ``type``. - """ - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.BigQueryTable, - ) - - -class Schedule(proto.Message): - r"""Schedule for inspect job triggers. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - recurrence_period_duration (google.protobuf.duration_pb2.Duration): - With this option a job is started on a - regular periodic basis. For example: every day - (86400 seconds). - A scheduled start time will be skipped if the - previous execution has not ended when its - scheduled time occurs. - This value must be set to a time duration - greater than or equal to 1 day and can be no - longer than 60 days. - - This field is a member of `oneof`_ ``option``. - """ - - recurrence_period_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - oneof='option', - message=duration_pb2.Duration, - ) - - -class Manual(proto.Message): - r"""Job trigger option for hybrid jobs. Jobs must be manually - created and finished. - - """ - - -class InspectTemplate(proto.Message): - r"""The inspectTemplate contains a configuration (set of types of - sensitive data to be detected) to be used anywhere you otherwise - would normally specify InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates to learn - more. - - Attributes: - name (str): - Output only. The template name. - - The template will have one of the following formats: - ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of an - inspectTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of an - inspectTemplate. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - The core content of the template. - Configuration of the scanning process. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='InspectConfig', - ) - - -class DeidentifyTemplate(proto.Message): - r"""DeidentifyTemplates contains instructions on how to - de-identify content. See - https://cloud.google.com/dlp/docs/concepts-templates to learn - more. - - Attributes: - name (str): - Output only. The template name. - - The template will have one of the following formats: - ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of an - inspectTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of an - inspectTemplate. - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - The core content of the template. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - deidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='DeidentifyConfig', - ) - - -class Error(proto.Message): - r"""Details information about an error encountered during job - execution or the results of an unsuccessful activation of the - JobTrigger. - - Attributes: - details (google.rpc.status_pb2.Status): - Detailed error codes and messages. - timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): - The times the error occurred. - """ - - details: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class JobTrigger(proto.Message): - r"""Contains a configuration to make dlp api calls on a repeating - basis. See - https://cloud.google.com/dlp/docs/concepts-job-triggers to learn - more. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Unique resource name for the triggeredJob, assigned by the - service when the triggeredJob is created, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - display_name (str): - Display name (max 100 chars) - description (str): - User provided description (max 256 chars) - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - For inspect jobs, a snapshot of the - configuration. - - This field is a member of `oneof`_ ``job``. 
- triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): - A list of triggers which will be OR'ed - together. Only one in the list needs to trigger - for a job to be started. The list may contain - only a single Schedule trigger and must have at - least one object. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Output only. A stream of errors encountered - when the trigger was activated. Repeated errors - may result in the JobTrigger automatically being - paused. Will return the last 100 errors. - Whenever the JobTrigger is modified this list - will be cleared. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of a - triggeredJob. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of a - triggeredJob. - last_run_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp of the last time - this trigger executed. - status (google.cloud.dlp_v2.types.JobTrigger.Status): - Required. A status for this trigger. - """ - class Status(proto.Enum): - r"""Whether the trigger is currently active. If PAUSED or - CANCELLED, no jobs will be created with this configuration. The - service may automatically pause triggers experiencing frequent - errors. To restart a job, set the status to HEALTHY after - correcting user errors. - - Values: - STATUS_UNSPECIFIED (0): - Unused. - HEALTHY (1): - Trigger is healthy. - PAUSED (2): - Trigger is temporarily paused. - CANCELLED (3): - Trigger is cancelled and can not be resumed. - """ - STATUS_UNSPECIFIED = 0 - HEALTHY = 1 - PAUSED = 2 - CANCELLED = 3 - - class Trigger(proto.Message): - r"""What event needs to occur for a new job to be started. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - schedule (google.cloud.dlp_v2.types.Schedule): - Create a job on a repeating basis based on - the elapse of time. - - This field is a member of `oneof`_ ``trigger``. - manual (google.cloud.dlp_v2.types.Manual): - For use with hybrid jobs. Jobs must be - manually created and finished. - - This field is a member of `oneof`_ ``trigger``. - """ - - schedule: 'Schedule' = proto.Field( - proto.MESSAGE, - number=1, - oneof='trigger', - message='Schedule', - ) - manual: 'Manual' = proto.Field( - proto.MESSAGE, - number=2, - oneof='trigger', - message='Manual', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - inspect_job: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='job', - message='InspectJobConfig', - ) - triggers: MutableSequence[Trigger] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=Trigger, - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='Error', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - last_run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - status: Status = proto.Field( - proto.ENUM, - number=10, - enum=Status, - ) - - -class Action(proto.Message): - r"""A task to execute on the completion of a job. - See https://cloud.google.com/dlp/docs/concepts-actions to learn - more. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): - Save resulting findings in a provided - location. - - This field is a member of `oneof`_ ``action``. - pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub): - Publish a notification to a Pub/Sub topic. - - This field is a member of `oneof`_ ``action``. - publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc): - Publish summary to Cloud Security Command - Center (Alpha). - - This field is a member of `oneof`_ ``action``. - publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog): - Publish findings to Cloud Datahub. - - This field is a member of `oneof`_ ``action``. - deidentify (google.cloud.dlp_v2.types.Action.Deidentify): - Create a de-identified copy of the input - data. - - This field is a member of `oneof`_ ``action``. - job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails): - Sends an email when the job completes. The email goes to IAM - project owners and technical `Essential - Contacts `__. - - This field is a member of `oneof`_ ``action``. - publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver): - Enable Stackdriver metric dlp.googleapis.com/finding_count. - - This field is a member of `oneof`_ ``action``. - """ - - class SaveFindings(proto.Message): - r"""If set, the detailed findings will be persisted to the - specified OutputStorageConfig. Only a single instance of this - action can be specified. - Compatible with: Inspect, Risk - - Attributes: - output_config (google.cloud.dlp_v2.types.OutputStorageConfig): - Location to store findings outside of DLP. 
- """ - - output_config: 'OutputStorageConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='OutputStorageConfig', - ) - - class PublishToPubSub(proto.Message): - r"""Publish a message into a given Pub/Sub topic when DlpJob has - completed. The message contains a single field, ``DlpJobName``, - which is equal to the finished job's - ```DlpJob.name`` `__. - Compatible with: Inspect, Risk - - Attributes: - topic (str): - Cloud Pub/Sub topic to send notifications to. - The topic must have given publishing access - rights to the DLP API service account executing - the long running DlpJob sending the - notifications. Format is - projects/{project}/topics/{topic}. - """ - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - - class PublishSummaryToCscc(proto.Message): - r"""Publish the result summary of a DlpJob to the Cloud Security - Command Center (CSCC Alpha). - This action is only available for projects which are parts of an - organization and whitelisted for the alpha Cloud Security - Command Center. - The action will publish the count of finding instances and their - info types. The summary of findings will be persisted in CSCC - and are governed by CSCC service-specific policy, see - https://cloud.google.com/terms/service-terms Only a single - instance of this action can be specified. Compatible with: - Inspect - - """ - - class PublishFindingsToCloudDataCatalog(proto.Message): - r"""Publish findings of a DlpJob to Data Catalog. In Data Catalog, tag - templates are applied to the resource that Cloud DLP scanned. Data - Catalog tag templates are stored in the same project and region - where the BigQuery table exists. For Cloud DLP to create and apply - the tag template, the Cloud DLP service agent must have the - ``roles/datacatalog.tagTemplateOwner`` permission on the project. - The tag template contains fields summarizing the results of the - DlpJob. Any field values previously written by another DlpJob are - deleted. 
[InfoType naming patterns][google.privacy.dlp.v2.InfoType] - are strictly enforced when using this feature. - - Findings are persisted in Data Catalog storage and are governed by - service-specific policies for Data Catalog. For more information, - see `Service Specific - Terms `__. - - Only a single instance of this action can be specified. This action - is allowed only if all resources being scanned are BigQuery tables. - Compatible with: Inspect - - """ - - class Deidentify(proto.Message): - r"""Create a de-identified copy of the requested table or files. - - A TransformationDetail will be created for each transformation. - - If any rows in BigQuery are skipped during de-identification - (transformation errors or row size exceeds BigQuery insert API - limits) they are placed in the failure output table. If the original - row exceeds the BigQuery insert API limit it will be truncated when - written to the failure output table. The failure output table can be - set in the - action.deidentify.output.big_query_output.deidentified_failure_output_table - field, if no table is set, a table will be automatically created in - the same project and dataset as the original table. - - Compatible with: Inspect - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transformation_config (google.cloud.dlp_v2.types.TransformationConfig): - User specified deidentify templates and - configs for structured, unstructured, and image - files. - transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): - Config for storing transformation details. This is separate - from the de-identified content, and contains metadata about - the successful transformations and/or failures that occurred - while de-identifying. 
This needs to be set in order for - users to access information about the status of each - transformation (see - [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] - message for more information about what is noted). - cloud_storage_output (str): - Required. User settable Cloud Storage bucket - and folders to store de-identified files. This - field must be set for cloud storage - deidentification. The output Cloud Storage - bucket must be different from the input bucket. - De-identified files will overwrite files in the - output path. - Form of: gs://bucket/folder/ or gs://bucket - - This field is a member of `oneof`_ ``output``. - file_types_to_transform (MutableSequence[google.cloud.dlp_v2.types.FileType]): - List of user-specified file type groups to transform. If - specified, only the files with these filetypes will be - transformed. If empty, all supported files will be - transformed. Supported types may be automatically added over - time. If a file type is set in this field that isn't - supported by the Deidentify action then the job will fail - and will not be successfully created/started. Currently the - only filetypes supported are: IMAGES, TEXT_FILES, CSV, TSV. - """ - - transformation_config: 'TransformationConfig' = proto.Field( - proto.MESSAGE, - number=7, - message='TransformationConfig', - ) - transformation_details_storage_config: 'TransformationDetailsStorageConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='TransformationDetailsStorageConfig', - ) - cloud_storage_output: str = proto.Field( - proto.STRING, - number=9, - oneof='output', - ) - file_types_to_transform: MutableSequence[storage.FileType] = proto.RepeatedField( - proto.ENUM, - number=8, - enum=storage.FileType, - ) - - class JobNotificationEmails(proto.Message): - r"""Sends an email when the job completes. The email goes to IAM project - owners and technical `Essential - Contacts `__. 
- - """ - - class PublishToStackdriver(proto.Message): - r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This - will publish a metric to stack driver on each infotype requested and - how many findings were found for it. CustomDetectors will be - bucketed as 'Custom' under the Stackdriver label 'info_type'. - - """ - - save_findings: SaveFindings = proto.Field( - proto.MESSAGE, - number=1, - oneof='action', - message=SaveFindings, - ) - pub_sub: PublishToPubSub = proto.Field( - proto.MESSAGE, - number=2, - oneof='action', - message=PublishToPubSub, - ) - publish_summary_to_cscc: PublishSummaryToCscc = proto.Field( - proto.MESSAGE, - number=3, - oneof='action', - message=PublishSummaryToCscc, - ) - publish_findings_to_cloud_data_catalog: PublishFindingsToCloudDataCatalog = proto.Field( - proto.MESSAGE, - number=5, - oneof='action', - message=PublishFindingsToCloudDataCatalog, - ) - deidentify: Deidentify = proto.Field( - proto.MESSAGE, - number=7, - oneof='action', - message=Deidentify, - ) - job_notification_emails: JobNotificationEmails = proto.Field( - proto.MESSAGE, - number=8, - oneof='action', - message=JobNotificationEmails, - ) - publish_to_stackdriver: PublishToStackdriver = proto.Field( - proto.MESSAGE, - number=9, - oneof='action', - message=PublishToStackdriver, - ) - - -class TransformationConfig(proto.Message): - r"""User specified templates and configs for how to deidentify - structured, unstructures, and image files. User must provide - either a unstructured deidentify template or at least one redact - image config. - - Attributes: - deidentify_template (str): - De-identify template. If this template is specified, it will - serve as the default de-identify template. This template - cannot contain ``record_transformations`` since it can be - used for unstructured content such as free-form text files. - If this template is not set, a default - ``ReplaceWithInfoTypeConfig`` will be used to de-identify - unstructured content. 
- structured_deidentify_template (str): - Structured de-identify template. If this template is - specified, it will serve as the de-identify template for - structured content such as delimited files and tables. If - this template is not set but the ``deidentify_template`` is - set, then ``deidentify_template`` will also apply to the - structured content. If neither template is set, a default - ``ReplaceWithInfoTypeConfig`` will be used to de-identify - structured content. - image_redact_template (str): - Image redact template. - If this template is specified, it will serve as - the de-identify template for images. If this - template is not set, all findings in the image - will be redacted with a black box. - """ - - deidentify_template: str = proto.Field( - proto.STRING, - number=1, - ) - structured_deidentify_template: str = proto.Field( - proto.STRING, - number=2, - ) - image_redact_template: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CreateInspectTemplateRequest(proto.Message): - r"""Request message for CreateInspectTemplate. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - Required. The InspectTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - template_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateInspectTemplateRequest(proto.Message): - r"""Request message for UpdateInspectTemplate. - - Attributes: - name (str): - Required. Resource name of organization and inspectTemplate - to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - New InspectTemplate value. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetInspectTemplateRequest(proto.Message): - r"""Request message for GetInspectTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListInspectTemplatesRequest(proto.Message): - r"""Request message for ListInspectTemplates. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListInspectTemplates``. - page_size (int): - Size of the page, can be limited by the - server. 
If zero server returns a page of max - size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the template was - created. - - ``update_time``: corresponds to the time the template was - last updated. - - ``name``: corresponds to the template's name. - - ``display_name``: corresponds to the template's display - name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListInspectTemplatesResponse(proto.Message): - r"""Response message for ListInspectTemplates. - - Attributes: - inspect_templates (MutableSequence[google.cloud.dlp_v2.types.InspectTemplate]): - List of inspectTemplates, up to page_size in - ListInspectTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListInspectTemplates request. - """ - - @property - def raw_page(self): - return self - - inspect_templates: MutableSequence['InspectTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteInspectTemplateRequest(proto.Message): - r"""Request message for DeleteInspectTemplate. - - Attributes: - name (str): - Required. 
Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateJobTriggerRequest(proto.Message): - r"""Request message for CreateJobTrigger. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. - trigger_id (str): - The trigger id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - job_trigger: 'JobTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - trigger_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ActivateJobTriggerRequest(proto.Message): - r"""Request message for ActivateJobTrigger. - - Attributes: - name (str): - Required. Resource name of the trigger to activate, for - example ``projects/dlp-test-project/jobTriggers/53234423``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateJobTriggerRequest(proto.Message): - r"""Request message for UpdateJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - job_trigger: 'JobTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetJobTriggerRequest(proto.Message): - r"""Request message for GetJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDlpJobRequest(proto.Message): - r"""Request message for CreateDlpJobRequest. Used to initiate - long running jobs such as calculating risk metrics or inspecting - Google Cloud Storage. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - An inspection job scans a storage repository - for InfoTypes. - - This field is a member of `oneof`_ ``job``. - risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - A risk analysis job calculates - re-identification risk metrics for a BigQuery - table. - - This field is a member of `oneof`_ ``job``. - job_id (str): - The job id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_job: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='job', - message='InspectJobConfig', - ) - risk_job: 'RiskAnalysisJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='job', - message='RiskAnalysisJobConfig', - ) - job_id: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListJobTriggersRequest(proto.Message): - r"""Request message for ListJobTriggers. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ListJobTriggers. ``order_by`` field must not change for - subsequent calls. - page_size (int): - Size of the page, can be limited by a server. - order_by (str): - Comma separated list of triggeredJob fields to order by, - followed by ``asc`` or ``desc`` postfix. This list is - case-insensitive, default sorting order is ascending, - redundant space characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the JobTrigger - was created. - - ``update_time``: corresponds to the time the JobTrigger - was last updated. - - ``last_run_time``: corresponds to the last time the - JobTrigger ran. - - ``name``: corresponds to the JobTrigger's name. - - ``display_name``: corresponds to the JobTrigger's display - name. - - ``status``: corresponds to JobTrigger's status. - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. 
- - Supported fields/values for inspect triggers: - - - ``status`` - HEALTHY|PAUSED|CANCELLED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - 'last_run_time\` - RFC 3339 formatted timestamp, - surrounded by quotation marks. Nanoseconds are - ignored. - - 'error_count' - Number of errors that have occurred - while running. - - - The operator must be ``=`` or ``!=`` for status and - inspected_storage. - - Examples: - - - inspected_storage = cloud_storage AND status = HEALTHY - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = PAUSED OR - state = HEALTHY) - - last_run_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of jobs. Will use ``DlpJobType.INSPECT`` if not - set. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=6, - enum='DlpJobType', - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListJobTriggersResponse(proto.Message): - r"""Response message for ListJobTriggers. - - Attributes: - job_triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger]): - List of triggeredJobs, up to page_size in - ListJobTriggersRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListJobTriggers request. 
- """ - - @property - def raw_page(self): - return self - - job_triggers: MutableSequence['JobTrigger'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='JobTrigger', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteJobTriggerRequest(proto.Message): - r"""Request message for DeleteJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class InspectJobConfig(proto.Message): - r"""Controls what and how to inspect for findings. - - Attributes: - storage_config (google.cloud.dlp_v2.types.StorageConfig): - The data to scan. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - How and what to scan for. - inspect_template_name (str): - If provided, will be used as the default for all values in - InspectConfig. ``inspect_config`` will be merged into the - values persisted as part of the template. - actions (MutableSequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. - """ - - storage_config: storage.StorageConfig = proto.Field( - proto.MESSAGE, - number=1, - message=storage.StorageConfig, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=3, - ) - actions: MutableSequence['Action'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Action', - ) - - -class DataProfileAction(proto.Message): - r"""A task to execute when a data profile has been generated. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - export_data (google.cloud.dlp_v2.types.DataProfileAction.Export): - Export data profiles into a provided - location. - - This field is a member of `oneof`_ ``action``. - pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): - Publish a message into the Pub/Sub topic. - - This field is a member of `oneof`_ ``action``. - """ - class EventType(proto.Enum): - r"""Types of event that can trigger an action. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - Unused. - NEW_PROFILE (1): - New profile (not a re-profile). - CHANGED_PROFILE (2): - Changed one of the following profile metrics: - - - Table data risk score - - Table sensitivity score - - Table resource visibility - - Table encryption type - - Table predicted infoTypes - - Table other infoTypes - SCORE_INCREASED (3): - Table data risk score or sensitivity score - increased. - ERROR_CHANGED (4): - A user (non-internal) error occurred. - """ - EVENT_TYPE_UNSPECIFIED = 0 - NEW_PROFILE = 1 - CHANGED_PROFILE = 2 - SCORE_INCREASED = 3 - ERROR_CHANGED = 4 - - class Export(proto.Message): - r"""If set, the detailed data profiles will be persisted to the - location of your choice whenever updated. - - Attributes: - profile_table (google.cloud.dlp_v2.types.BigQueryTable): - Store all table and column profiles in an - existing table or a new table in an existing - dataset. Each re-generation will result in a new - row in BigQuery. - """ - - profile_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - message=storage.BigQueryTable, - ) - - class PubSubNotification(proto.Message): - r"""Send a Pub/Sub message into the given Pub/Sub topic to connect other - systems to data profile generation. The message payload data will be - the byte serialization of ``DataProfilePubSubMessage``. - - Attributes: - topic (str): - Cloud Pub/Sub topic to send notifications to. 
- Format is projects/{project}/topics/{topic}. - event (google.cloud.dlp_v2.types.DataProfileAction.EventType): - The type of event that triggers a Pub/Sub. At most one - ``PubSubNotification`` per EventType is permitted. - pubsub_condition (google.cloud.dlp_v2.types.DataProfilePubSubCondition): - Conditions (e.g., data risk or sensitivity - level) for triggering a Pub/Sub. - detail_of_message (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification.DetailLevel): - How much data to include in the Pub/Sub message. If the user - wishes to limit the size of the message, they can use - resource_name and fetch the profile fields they wish to. Per - table profile (not per column). - """ - class DetailLevel(proto.Enum): - r"""The levels of detail that can be included in the Pub/Sub - message. - - Values: - DETAIL_LEVEL_UNSPECIFIED (0): - Unused. - TABLE_PROFILE (1): - The full table data profile. - RESOURCE_NAME (2): - The resource name of the table. - """ - DETAIL_LEVEL_UNSPECIFIED = 0 - TABLE_PROFILE = 1 - RESOURCE_NAME = 2 - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - event: 'DataProfileAction.EventType' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileAction.EventType', - ) - pubsub_condition: 'DataProfilePubSubCondition' = proto.Field( - proto.MESSAGE, - number=3, - message='DataProfilePubSubCondition', - ) - detail_of_message: 'DataProfileAction.PubSubNotification.DetailLevel' = proto.Field( - proto.ENUM, - number=4, - enum='DataProfileAction.PubSubNotification.DetailLevel', - ) - - export_data: Export = proto.Field( - proto.MESSAGE, - number=1, - oneof='action', - message=Export, - ) - pub_sub_notification: PubSubNotification = proto.Field( - proto.MESSAGE, - number=2, - oneof='action', - message=PubSubNotification, - ) - - -class DataProfileJobConfig(proto.Message): - r"""Configuration for setting up a job to scan resources for profile - generation. 
Only one data profile configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to the [data - retention policy] - (https://cloud.google.com/dlp/docs/data-profiles#retention). - - Attributes: - location (google.cloud.dlp_v2.types.DataProfileLocation): - The data to scan. - project_id (str): - The project that will run the scan. The DLP - service account that exists within this project - must have access to all resources that are - profiled, and the Cloud DLP API must be enabled. - inspect_templates (MutableSequence[str]): - Detection logic for profile generation. - - Not all template features are used by profiles. - FindingLimits, include_quote and exclude_info_types have no - impact on data profiling. - - Multiple templates may be provided if there is data in - multiple regions. At most one template must be specified - per-region (including "global"). Each region is scanned - using the applicable template. If no region-specific - template is specified, but a "global" template is specified, - it will be copied to that region and used instead. If no - global or region-specific template is provided for a region - with data, that region's data will not be scanned. - - For more information, see - https://cloud.google.com/dlp/docs/data-profiles#data_residency. - data_profile_actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): - Actions to execute at the completion of the - job. - """ - - location: 'DataProfileLocation' = proto.Field( - proto.MESSAGE, - number=1, - message='DataProfileLocation', - ) - project_id: str = proto.Field( - proto.STRING, - number=5, - ) - inspect_templates: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - data_profile_actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='DataProfileAction', - ) - - -class DataProfileLocation(proto.Message): - r"""The data that will be profiled. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - organization_id (int): - The ID of an organization to scan. - - This field is a member of `oneof`_ ``location``. - folder_id (int): - The ID of the Folder within an organization - to scan. - - This field is a member of `oneof`_ ``location``. - """ - - organization_id: int = proto.Field( - proto.INT64, - number=1, - oneof='location', - ) - folder_id: int = proto.Field( - proto.INT64, - number=2, - oneof='location', - ) - - -class DlpJob(proto.Message): - r"""Combines all of the information about a DLP job. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The server-assigned name. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. - state (google.cloud.dlp_v2.types.DlpJob.JobState): - State of a job. - risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): - Results from analyzing risk of a data source. - - This field is a member of `oneof`_ ``details``. - inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): - Results from inspecting a data source. - - This field is a member of `oneof`_ ``details``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job finished. 
- job_trigger_name (str): - If created by a job trigger, the resource - name of the trigger that instantiated the job. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - A stream of errors encountered running the - job. - """ - class JobState(proto.Enum): - r"""Possible states of a job. New items may be added. - - Values: - JOB_STATE_UNSPECIFIED (0): - Unused. - PENDING (1): - The job has not yet started. - RUNNING (2): - The job is currently running. Once a job has - finished it will transition to FAILED or DONE. - DONE (3): - The job is no longer running. - CANCELED (4): - The job was canceled before it could be - completed. - FAILED (5): - The job had an error and did not complete. - ACTIVE (6): - The job is currently accepting findings via - hybridInspect. A hybrid job in ACTIVE state may - continue to have findings added to it through - the calling of hybridInspect. After the job has - finished no more calls to hybridInspect may be - made. ACTIVE jobs can transition to DONE. 
- """ - JOB_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - CANCELED = 4 - FAILED = 5 - ACTIVE = 6 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=2, - enum='DlpJobType', - ) - state: JobState = proto.Field( - proto.ENUM, - number=3, - enum=JobState, - ) - risk_details: 'AnalyzeDataSourceRiskDetails' = proto.Field( - proto.MESSAGE, - number=4, - oneof='details', - message='AnalyzeDataSourceRiskDetails', - ) - inspect_details: 'InspectDataSourceDetails' = proto.Field( - proto.MESSAGE, - number=5, - oneof='details', - message='InspectDataSourceDetails', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - job_trigger_name: str = proto.Field( - proto.STRING, - number=10, - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='Error', - ) - - -class GetDlpJobRequest(proto.Message): - r"""The request message for [DlpJobs.GetDlpJob][]. - - Attributes: - name (str): - Required. The name of the DlpJob resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDlpJobsRequest(proto.Message): - r"""The request message for listing DLP jobs. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - - - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - ``trigger_name`` - The name of the trigger that - created the job. - - 'end_time\` - Corresponds to the time the job - finished. - - 'start_time\` - Corresponds to the time the job - finished. - - - Supported fields for risk analysis jobs: - - - ``state`` - RUNNING|CANCELED|FINISHED|FAILED - - 'end_time\` - Corresponds to the time the job - finished. - - 'start_time\` - Corresponds to the time the job - finished. - - - The operator must be ``=`` or ``!=``. - - Examples: - - - inspected_storage = cloud_storage AND state = done - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = done OR - state = canceled) - - end_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. - page_size (int): - The standard list page size. - page_token (str): - The standard list page token. 
- type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. Defaults to ``DlpJobType.INSPECT`` - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc, end_time asc, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the job was - created. - - ``end_time``: corresponds to the time the job ended. - - ``name``: corresponds to the job's name. - - ``state``: corresponds to ``state`` - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=5, - enum='DlpJobType', - ) - order_by: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListDlpJobsResponse(proto.Message): - r"""The response message for listing DLP jobs. - - Attributes: - jobs (MutableSequence[google.cloud.dlp_v2.types.DlpJob]): - A list of DlpJobs that matches the specified - filter in the request. - next_page_token (str): - The standard List next-page token. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence['DlpJob'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DlpJob', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelDlpJobRequest(proto.Message): - r"""The request message for canceling a DLP job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be cancelled. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FinishDlpJobRequest(proto.Message): - r"""The request message for finishing a DLP hybrid job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be cancelled. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteDlpJobRequest(proto.Message): - r"""The request message for deleting a DLP job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDeidentifyTemplateRequest(proto.Message): - r"""Request message for CreateDeidentifyTemplate. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Required. The DeidentifyTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. 
This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - template_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateDeidentifyTemplateRequest(proto.Message): - r"""Request message for UpdateDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetDeidentifyTemplateRequest(proto.Message): - r"""Request message for GetDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDeidentifyTemplatesRequest(proto.Message): - r"""Request message for ListDeidentifyTemplates. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListDeidentifyTemplates``. - page_size (int): - Size of the page, can be limited by the - server. If zero server returns a page of max - size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the template was - created. - - ``update_time``: corresponds to the time the template was - last updated. - - ``name``: corresponds to the template's name. - - ``display_name``: corresponds to the template's display - name. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDeidentifyTemplatesResponse(proto.Message): - r"""Response message for ListDeidentifyTemplates. - - Attributes: - deidentify_templates (MutableSequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): - List of deidentify templates, up to page_size in - ListDeidentifyTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListDeidentifyTemplates request. - """ - - @property - def raw_page(self): - return self - - deidentify_templates: MutableSequence['DeidentifyTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DeidentifyTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteDeidentifyTemplateRequest(proto.Message): - r"""Request message for DeleteDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class LargeCustomDictionaryConfig(proto.Message): - r"""Configuration for a custom dictionary created from a data source of - any size up to the maximum size defined in the - `limits `__ page. The artifacts - of dictionary creation are stored in the specified Cloud Storage - location. Consider using ``CustomInfoType.Dictionary`` for smaller - dictionaries that satisfy the size requirements. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - output_path (google.cloud.dlp_v2.types.CloudStoragePath): - Location to store dictionary artifacts in - Cloud Storage. These files will only be - accessible by project owners and the DLP API. If - any of these artifacts are modified, the - dictionary is considered invalid and can no - longer be used. - cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): - Set of files containing newline-delimited - lists of dictionary phrases. - - This field is a member of `oneof`_ ``source``. - big_query_field (google.cloud.dlp_v2.types.BigQueryField): - Field in a BigQuery table where each cell - represents a dictionary phrase. - - This field is a member of `oneof`_ ``source``. - """ - - output_path: storage.CloudStoragePath = proto.Field( - proto.MESSAGE, - number=1, - message=storage.CloudStoragePath, - ) - cloud_storage_file_set: storage.CloudStorageFileSet = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message=storage.CloudStorageFileSet, - ) - big_query_field: storage.BigQueryField = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message=storage.BigQueryField, - ) - - -class LargeCustomDictionaryStats(proto.Message): - r"""Summary statistics of a custom dictionary. - - Attributes: - approx_num_phrases (int): - Approximate number of distinct phrases in the - dictionary. - """ - - approx_num_phrases: int = proto.Field( - proto.INT64, - number=1, - ) - - -class StoredInfoTypeConfig(proto.Message): - r"""Configuration for stored infoTypes. All fields and subfield - are provided by the user. For more information, see - https://cloud.google.com/dlp/docs/creating-custom-infotypes. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - display_name (str): - Display name of the StoredInfoType (max 256 - characters). - description (str): - Description of the StoredInfoType (max 256 - characters). - large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): - StoredInfoType where findings are defined by - a dictionary of phrases. - - This field is a member of `oneof`_ ``type``. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - Store dictionary-based CustomInfoType. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Store regular expression-based - StoredInfoType. - - This field is a member of `oneof`_ ``type``. - """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - large_custom_dictionary: 'LargeCustomDictionaryConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='LargeCustomDictionaryConfig', - ) - dictionary: storage.CustomInfoType.Dictionary = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - - -class StoredInfoTypeStats(proto.Message): - r"""Statistics for a StoredInfoType. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): - StoredInfoType where findings are defined by - a dictionary of phrases. - - This field is a member of `oneof`_ ``type``. 
- """ - - large_custom_dictionary: 'LargeCustomDictionaryStats' = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='LargeCustomDictionaryStats', - ) - - -class StoredInfoTypeVersion(proto.Message): - r"""Version of a StoredInfoType, including the configuration used - to build it, create timestamp, and current state. - - Attributes: - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - StoredInfoType configuration. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Create timestamp of the version. Read-only, - determined by the system when the version is - created. - state (google.cloud.dlp_v2.types.StoredInfoTypeState): - Stored info type version state. Read-only, - updated by the system during dictionary - creation. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Errors that occurred when creating this storedInfoType - version, or anomalies detected in the storedInfoType data - that render it unusable. Only the five most recent errors - will be displayed, with the most recent error appearing - first. - - For example, some of the data for stored custom dictionaries - is put in the user's Cloud Storage bucket, and if this data - is modified or deleted by the user or another system, the - dictionary becomes invalid. - - If any errors occur, fix the problem indicated by the error - message and use the UpdateStoredInfoType API method to - create another version of the storedInfoType to continue - using it, reusing the same ``config`` if it was not the - source of the error. - stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): - Statistics about this storedInfoType version. 
- """ - - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='StoredInfoTypeConfig', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - state: 'StoredInfoTypeState' = proto.Field( - proto.ENUM, - number=3, - enum='StoredInfoTypeState', - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Error', - ) - stats: 'StoredInfoTypeStats' = proto.Field( - proto.MESSAGE, - number=5, - message='StoredInfoTypeStats', - ) - - -class StoredInfoType(proto.Message): - r"""StoredInfoType resource message that contains information - about the current version and any pending updates. - - Attributes: - name (str): - Resource name. - current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): - Current version of the stored info type. - pending_versions (MutableSequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): - Pending versions of the stored info type. - Empty if no versions are pending. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - current_version: 'StoredInfoTypeVersion' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeVersion', - ) - pending_versions: MutableSequence['StoredInfoTypeVersion'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StoredInfoTypeVersion', - ) - - -class CreateStoredInfoTypeRequest(proto.Message): - r"""Request message for CreateStoredInfoType. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the storedInfoType - to create. - stored_info_type_id (str): - The storedInfoType ID can contain uppercase and lowercase - letters, numbers, and hyphens; that is, it must match the - regular expression: ``[a-zA-Z\d-_]+``. The maximum length is - 100 characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - stored_info_type_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateStoredInfoTypeRequest(proto.Message): - r"""Request message for UpdateStoredInfoType. - - Attributes: - name (str): - Required. Resource name of organization and storedInfoType - to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. 
- config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the storedInfoType. - If not provided, a new version of the - storedInfoType will be created with the existing - configuration. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetStoredInfoTypeRequest(proto.Message): - r"""Request message for GetStoredInfoType. - - Attributes: - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListStoredInfoTypesRequest(proto.Message): - r"""Request message for ListStoredInfoTypes. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListStoredInfoTypes``. - page_size (int): - Size of the page, can be limited by the - server. 
If zero server returns a page of max - size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc, display_name, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the most recent - version of the resource was created. - - ``state``: corresponds to the state of the resource. - - ``name``: corresponds to resource name. - - ``display_name``: corresponds to info type's display - name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListStoredInfoTypesResponse(proto.Message): - r"""Response message for ListStoredInfoTypes. - - Attributes: - stored_info_types (MutableSequence[google.cloud.dlp_v2.types.StoredInfoType]): - List of storedInfoTypes, up to page_size in - ListStoredInfoTypesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListStoredInfoTypes request. - """ - - @property - def raw_page(self): - return self - - stored_info_types: MutableSequence['StoredInfoType'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='StoredInfoType', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteStoredInfoTypeRequest(proto.Message): - r"""Request message for DeleteStoredInfoType. - - Attributes: - name (str): - Required. 
Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class HybridInspectJobTriggerRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the trigger to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item: 'HybridContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridInspectDlpJobRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item: 'HybridContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridContentItem(proto.Message): - r"""An individual hybrid item to inspect. Will be stored - temporarily during processing. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The item to inspect. - finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): - Supplementary information that will be added - to each finding. 
- """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - finding_details: 'HybridFindingDetails' = proto.Field( - proto.MESSAGE, - number=2, - message='HybridFindingDetails', - ) - - -class HybridFindingDetails(proto.Message): - r"""Populate to associate additional data with each finding. - - Attributes: - container_details (google.cloud.dlp_v2.types.Container): - Details about the container where the content - being inspected is from. - file_offset (int): - Offset in bytes of the line, from the - beginning of the file, where the finding is - located. Populate if the item being scanned is - only part of a bigger item, such as a shard of a - file and you want to track the absolute position - of the finding. - row_offset (int): - Offset of the row for tables. Populate if the - row(s) being scanned are part of a bigger - dataset and you want to keep track of their - absolute position. - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional information to make - findings meaningful such as the columns that are primary - keys. If not known ahead of time, can also be set within - each inspect hybrid call and the two will be merged. Note - that identifying_fields will only be stored to BigQuery, and - only if the BigQuery action has been included. - labels (MutableMapping[str, str]): - Labels to represent user provided metadata about the data - being inspected. If configured by the job, some key values - may be required. The labels associated with ``Finding``'s - produced by hybrid inspection. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. 
- - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - """ - - container_details: 'Container' = proto.Field( - proto.MESSAGE, - number=1, - message='Container', - ) - file_offset: int = proto.Field( - proto.INT64, - number=2, - ) - row_offset: int = proto.Field( - proto.INT64, - number=3, - ) - table_options: storage.TableOptions = proto.Field( - proto.MESSAGE, - number=4, - message=storage.TableOptions, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -class HybridInspectResponse(proto.Message): - r"""Quota exceeded errors will be thrown once quota has been met. - """ - - -class DataRiskLevel(proto.Message): - r"""Score is a summary of all elements in the data profile. - A higher number means more risk. - - Attributes: - score (google.cloud.dlp_v2.types.DataRiskLevel.DataRiskLevelScore): - The score applied to the resource. - """ - class DataRiskLevelScore(proto.Enum): - r"""Various score levels for resources. - - Values: - RISK_SCORE_UNSPECIFIED (0): - Unused. - RISK_LOW (10): - Low risk - Lower indication of sensitive data - that appears to have additional access - restrictions in place or no indication of - sensitive data found. - RISK_MODERATE (20): - Medium risk - Sensitive data may be present - but additional access or fine grain access - restrictions appear to be present. Consider - limiting access even further or transform data - to mask. - RISK_HIGH (30): - High risk – SPII may be present. Access - controls may include public ACLs. Exfiltration - of data may lead to user data loss. - Re-identification of users may be possible. - Consider limiting usage and or removing SPII. 
- """ - RISK_SCORE_UNSPECIFIED = 0 - RISK_LOW = 10 - RISK_MODERATE = 20 - RISK_HIGH = 30 - - score: DataRiskLevelScore = proto.Field( - proto.ENUM, - number=1, - enum=DataRiskLevelScore, - ) - - -class DataProfileConfigSnapshot(proto.Message): - r"""Snapshot of the configurations used to generate the profile. - - Attributes: - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - A copy of the inspection config used to generate this - profile. This is a copy of the inspect_template specified in - ``DataProfileJobConfig``. - data_profile_job (google.cloud.dlp_v2.types.DataProfileJobConfig): - A copy of the configuration used to generate - this profile. - """ - - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - data_profile_job: 'DataProfileJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='DataProfileJobConfig', - ) - - -class TableDataProfile(proto.Message): - r"""The profile for a scanned table. - - Attributes: - name (str): - The name of the profile. - project_data_profile (str): - The resource name to the project data profile - for this table. - dataset_project_id (str): - The GCP project ID that owns the BigQuery - dataset. - dataset_location (str): - The BigQuery location where the dataset's - data is stored. See - https://cloud.google.com/bigquery/docs/locations - for supported locations. - dataset_id (str): - The BigQuery dataset ID. - table_id (str): - The BigQuery table ID. - full_resource (str): - The resource name of the table. - https://cloud.google.com/apis/design/resource_names#full_resource_name - profile_status (google.cloud.dlp_v2.types.ProfileStatus): - Success or error status from the most recent - profile generation attempt. May be empty if the - profile is still being generated. - state (google.cloud.dlp_v2.types.TableDataProfile.State): - State of a profile. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - The sensitivity score of this table. 
- data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): - The data risk level of this table. - predicted_info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSummary]): - The infoTypes predicted from this table's - data. - other_info_types (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): - Other infoTypes found in this table's data. - config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): - The snapshot of the configurations used to - generate the profile. - last_modified_time (google.protobuf.timestamp_pb2.Timestamp): - The time when this table was last modified - expiration_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when this table expires. - scanned_column_count (int): - The number of columns profiled in the table. - failed_column_count (int): - The number of columns skipped in the table - because of an error. - table_size_bytes (int): - The size of the table when the profile was - generated. - row_count (int): - Number of rows in the table when the profile - was generated. This will not be populated for - BigLake tables. - encryption_status (google.cloud.dlp_v2.types.EncryptionStatus): - How the table is encrypted. - resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): - How broadly a resource has been shared. - profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): - The last time the profile was generated. - resource_labels (MutableMapping[str, str]): - The labels applied to the resource at the - time the profile was generated. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the table was created. - """ - class State(proto.Enum): - r"""Possible states of a profile. New items may be added. - - Values: - STATE_UNSPECIFIED (0): - Unused. - RUNNING (1): - The profile is currently running. Once a - profile has finished it will transition to DONE. - DONE (2): - The profile is no longer generating. 
If - profile_status.status.code is 0, the profile succeeded, - otherwise, it failed. - """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - DONE = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - project_data_profile: str = proto.Field( - proto.STRING, - number=2, - ) - dataset_project_id: str = proto.Field( - proto.STRING, - number=24, - ) - dataset_location: str = proto.Field( - proto.STRING, - number=29, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=25, - ) - table_id: str = proto.Field( - proto.STRING, - number=26, - ) - full_resource: str = proto.Field( - proto.STRING, - number=3, - ) - profile_status: 'ProfileStatus' = proto.Field( - proto.MESSAGE, - number=21, - message='ProfileStatus', - ) - state: State = proto.Field( - proto.ENUM, - number=22, - enum=State, - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=5, - message=storage.SensitivityScore, - ) - data_risk_level: 'DataRiskLevel' = proto.Field( - proto.MESSAGE, - number=6, - message='DataRiskLevel', - ) - predicted_info_types: MutableSequence['InfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message='InfoTypeSummary', - ) - other_info_types: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=28, - message='OtherInfoTypeSummary', - ) - config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( - proto.MESSAGE, - number=7, - message='DataProfileConfigSnapshot', - ) - last_modified_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - expiration_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - scanned_column_count: int = proto.Field( - proto.INT64, - number=10, - ) - failed_column_count: int = proto.Field( - proto.INT64, - number=11, - ) - table_size_bytes: int = proto.Field( - proto.INT64, - number=12, - ) - row_count: int = proto.Field( - proto.INT64, - 
number=13, - ) - encryption_status: 'EncryptionStatus' = proto.Field( - proto.ENUM, - number=14, - enum='EncryptionStatus', - ) - resource_visibility: 'ResourceVisibility' = proto.Field( - proto.ENUM, - number=15, - enum='ResourceVisibility', - ) - profile_last_generated: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=16, - message=timestamp_pb2.Timestamp, - ) - resource_labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=17, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=23, - message=timestamp_pb2.Timestamp, - ) - - -class ProfileStatus(proto.Message): - r""" - - Attributes: - status (google.rpc.status_pb2.Status): - Profiling status code and optional message - timestamp (google.protobuf.timestamp_pb2.Timestamp): - Time when the profile generation status was - updated - """ - - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class InfoTypeSummary(proto.Message): - r"""The infoType details for this column. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The infoType. - estimated_prevalence (int): - Not populated for predicted infotypes. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - estimated_prevalence: int = proto.Field( - proto.INT32, - number=2, - ) - - -class OtherInfoTypeSummary(proto.Message): - r"""Infotype details for other infoTypes found within a column. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The other infoType. - estimated_prevalence (int): - Approximate percentage of non-null rows that - contained data detected by this infotype. 
- """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - estimated_prevalence: int = proto.Field( - proto.INT32, - number=2, - ) - - -class DataProfilePubSubCondition(proto.Message): - r"""A condition for determining whether a Pub/Sub should be - triggered. - - Attributes: - expressions (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions): - An expression. - """ - class ProfileScoreBucket(proto.Enum): - r"""Various score levels for resources. - - Values: - PROFILE_SCORE_BUCKET_UNSPECIFIED (0): - Unused. - HIGH (1): - High risk/sensitivity detected. - MEDIUM_OR_HIGH (2): - Medium or high risk/sensitivity detected. - """ - PROFILE_SCORE_BUCKET_UNSPECIFIED = 0 - HIGH = 1 - MEDIUM_OR_HIGH = 2 - - class PubSubCondition(proto.Message): - r"""A condition consisting of a value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - minimum_risk_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): - The minimum data risk score that triggers the - condition. - - This field is a member of `oneof`_ ``value``. - minimum_sensitivity_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): - The minimum sensitivity level that triggers - the condition. - - This field is a member of `oneof`_ ``value``. 
- """ - - minimum_risk_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( - proto.ENUM, - number=1, - oneof='value', - enum='DataProfilePubSubCondition.ProfileScoreBucket', - ) - minimum_sensitivity_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( - proto.ENUM, - number=2, - oneof='value', - enum='DataProfilePubSubCondition.ProfileScoreBucket', - ) - - class PubSubExpressions(proto.Message): - r"""An expression, consisting of an operator and conditions. - - Attributes: - logical_operator (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator): - The operator to apply to the collection of - conditions. - conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubCondition]): - Conditions to apply to the expression. - """ - class PubSubLogicalOperator(proto.Enum): - r"""Logical operators for conditional checks. - - Values: - LOGICAL_OPERATOR_UNSPECIFIED (0): - Unused. - OR (1): - Conditional OR. - AND (2): - Conditional AND. - """ - LOGICAL_OPERATOR_UNSPECIFIED = 0 - OR = 1 - AND = 2 - - logical_operator: 'DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator', - ) - conditions: MutableSequence['DataProfilePubSubCondition.PubSubCondition'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='DataProfilePubSubCondition.PubSubCondition', - ) - - expressions: PubSubExpressions = proto.Field( - proto.MESSAGE, - number=1, - message=PubSubExpressions, - ) - - -class DataProfilePubSubMessage(proto.Message): - r"""Pub/Sub topic message for a - DataProfileAction.PubSubNotification event. To receive a message - of protocol buffer schema type, convert the message data to an - object of this proto class. 
- - Attributes: - profile (google.cloud.dlp_v2.types.TableDataProfile): - If ``DetailLevel`` is ``TABLE_PROFILE`` this will be fully - populated. Otherwise, if ``DetailLevel`` is - ``RESOURCE_NAME``, then only ``name`` and ``full_resource`` - will be populated. - event (google.cloud.dlp_v2.types.DataProfileAction.EventType): - The event that caused the Pub/Sub message to - be sent. - """ - - profile: 'TableDataProfile' = proto.Field( - proto.MESSAGE, - number=1, - message='TableDataProfile', - ) - event: 'DataProfileAction.EventType' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileAction.EventType', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py deleted file mode 100644 index 4522dfd7..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py +++ /dev/null @@ -1,1476 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'Likelihood', - 'FileType', - 'InfoType', - 'SensitivityScore', - 'StoredType', - 'CustomInfoType', - 'FieldId', - 'PartitionId', - 'KindExpression', - 'DatastoreOptions', - 'CloudStorageRegexFileSet', - 'CloudStorageOptions', - 'CloudStorageFileSet', - 'CloudStoragePath', - 'BigQueryOptions', - 'StorageConfig', - 'HybridOptions', - 'BigQueryKey', - 'DatastoreKey', - 'Key', - 'RecordKey', - 'BigQueryTable', - 'BigQueryField', - 'EntityId', - 'TableOptions', - }, -) - - -class Likelihood(proto.Enum): - r"""Categorization of results based on how likely they are to - represent a match, based on the number of elements they contain - which imply a match. - - Values: - LIKELIHOOD_UNSPECIFIED (0): - Default value; same as POSSIBLE. - VERY_UNLIKELY (1): - Few matching elements. - UNLIKELY (2): - No description available. - POSSIBLE (3): - Some matching elements. - LIKELY (4): - No description available. - VERY_LIKELY (5): - Many matching elements. - """ - LIKELIHOOD_UNSPECIFIED = 0 - VERY_UNLIKELY = 1 - UNLIKELY = 2 - POSSIBLE = 3 - LIKELY = 4 - VERY_LIKELY = 5 - - -class FileType(proto.Enum): - r"""Definitions of file type groups to scan. New types will be - added to this list. - - Values: - FILE_TYPE_UNSPECIFIED (0): - Includes all files. - BINARY_FILE (1): - Includes all file extensions not covered by another entry. - Binary scanning attempts to convert the content of the file - to utf_8 to scan the file. If you wish to avoid this fall - back, specify one or more of the other FileType's in your - storage scan. 
- TEXT_FILE (2): - Included file extensions: - asc,asp, aspx, brf, c, cc,cfm, cgi, cpp, csv, - cxx, c++, cs, css, dart, dat, dot, eml,, - epbub, ged, go, h, hh, hpp, hxx, h++, hs, html, - htm, mkd, markdown, m, ml, mli, perl, pl, - plist, pm, php, phtml, pht, properties, py, - pyw, rb, rbw, rs, rss, rc, scala, sh, sql, - swift, tex, shtml, shtm, xhtml, lhs, ics, ini, - java, js, json, kix, kml, ocaml, md, txt, - text, tsv, vb, vcard, vcs, wml, xcodeproj, xml, - xsl, xsd, yml, yaml. - IMAGE (3): - Included file extensions: bmp, gif, jpg, jpeg, jpe, png. - bytes_limit_per_file has no effect on image files. Image - inspection is restricted to 'global', 'us', 'asia', and - 'europe'. - WORD (5): - Word files >30 MB will be scanned as binary - files. Included file extensions: - docx, dotx, docm, dotm - PDF (6): - PDF files >30 MB will be scanned as binary - files. Included file extensions: - pdf - AVRO (7): - Included file extensions: - avro - CSV (8): - Included file extensions: - csv - TSV (9): - Included file extensions: - tsv - POWERPOINT (11): - Powerpoint files >30 MB will be scanned as - binary files. Included file extensions: - pptx, pptm, potx, potm, pot - EXCEL (12): - Excel files >30 MB will be scanned as binary - files. Included file extensions: - xlsx, xlsm, xltx, xltm - """ - FILE_TYPE_UNSPECIFIED = 0 - BINARY_FILE = 1 - TEXT_FILE = 2 - IMAGE = 3 - WORD = 5 - PDF = 6 - AVRO = 7 - CSV = 8 - TSV = 9 - POWERPOINT = 11 - EXCEL = 12 - - -class InfoType(proto.Message): - r"""Type of information detected by the API. - - Attributes: - name (str): - Name of the information type. Either a name of your choosing - when creating a CustomInfoType, or one of the names listed - at https://cloud.google.com/dlp/docs/infotypes-reference - when specifying a built-in type. When sending Cloud DLP - results to Data Catalog, infoType names should conform to - the pattern ``[A-Za-z0-9$_-]{1,64}``. - version (str): - Optional version name for this InfoType. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - version: str = proto.Field( - proto.STRING, - number=2, - ) - - -class SensitivityScore(proto.Message): - r"""Score is a summary of all elements in the data profile. - A higher number means more sensitive. - - Attributes: - score (google.cloud.dlp_v2.types.SensitivityScore.SensitivityScoreLevel): - The score applied to the resource. - """ - class SensitivityScoreLevel(proto.Enum): - r"""Various score levels for resources. - - Values: - SENSITIVITY_SCORE_UNSPECIFIED (0): - Unused. - SENSITIVITY_LOW (10): - No sensitive information detected. Limited - access. - SENSITIVITY_MODERATE (20): - Medium risk - PII, potentially sensitive - data, or fields with free-text data that are at - higher risk of having intermittent sensitive - data. Consider limiting access. - SENSITIVITY_HIGH (30): - High risk – SPII may be present. Exfiltration - of data may lead to user data loss. - Re-identification of users may be possible. - Consider limiting usage and or removing SPII. - """ - SENSITIVITY_SCORE_UNSPECIFIED = 0 - SENSITIVITY_LOW = 10 - SENSITIVITY_MODERATE = 20 - SENSITIVITY_HIGH = 30 - - score: SensitivityScoreLevel = proto.Field( - proto.ENUM, - number=1, - enum=SensitivityScoreLevel, - ) - - -class StoredType(proto.Message): - r"""A reference to a StoredInfoType to use with scanning. - - Attributes: - name (str): - Resource name of the requested ``StoredInfoType``, for - example - ``organizations/433245324/storedInfoTypes/432452342`` or - ``projects/project-id/storedInfoTypes/432452342``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp indicating when the version of the - ``StoredInfoType`` used for inspection was created. - Output-only field, populated by the system. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class CustomInfoType(proto.Message): - r"""Custom information type provided by the user. Used to find - domain-specific sensitive information configurable to the data - in question. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - CustomInfoType can either be a new infoType, or an extension - of built-in infoType, when the name matches one of existing - infoTypes and that infoType is specified in - ``InspectContent.info_types`` field. Specifying the latter - adds findings to the one detected by the system. If built-in - info type is not specified in ``InspectContent.info_types`` - list then the name is treated as a custom info type. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Likelihood to return for this CustomInfoType. This base - value can be altered by a detection rule if the finding - meets the criteria specified by the rule. Defaults to - ``VERY_LIKELY`` if not specified. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - A list of phrases to detect as a - CustomInfoType. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression based CustomInfoType. - - This field is a member of `oneof`_ ``type``. - surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): - Message for detecting output from - deidentification transformations that support - reversing. - - This field is a member of `oneof`_ ``type``. 
- stored_type (google.cloud.dlp_v2.types.StoredType): - Load an existing ``StoredInfoType`` resource for use in - ``InspectDataSource``. Not currently supported in - ``InspectContent``. - - This field is a member of `oneof`_ ``type``. - detection_rules (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): - Set of detection rules to apply to all findings of this - CustomInfoType. Rules are applied in order that they are - specified. Not supported for the ``surrogate_type`` - CustomInfoType. - exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): - If set to EXCLUSION_TYPE_EXCLUDE this infoType will not - cause a finding to be returned. It still can be used for - rules matching. - """ - class ExclusionType(proto.Enum): - r""" - - Values: - EXCLUSION_TYPE_UNSPECIFIED (0): - A finding of this custom info type will not - be excluded from results. - EXCLUSION_TYPE_EXCLUDE (1): - A finding of this custom info type will be - excluded from final results, but can still - affect rule execution. - """ - EXCLUSION_TYPE_UNSPECIFIED = 0 - EXCLUSION_TYPE_EXCLUDE = 1 - - class Dictionary(proto.Message): - r"""Custom information type based on a dictionary of words or phrases. - This can be used to match sensitive information specific to the - data, such as a list of employee IDs or job titles. - - Dictionary words are case-insensitive and all characters other than - letters and digits in the unicode `Basic Multilingual - Plane `__ - will be replaced with whitespace when scanning for matches, so the - dictionary phrase "Sam Johnson" will match all three phrases "sam - johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the - characters surrounding any match must be of a different type than - the adjacent characters within the word, so letters must be next to - non-letters and digits next to non-digits. 
For example, the - dictionary word "jen" will match the first three letters of the text - "jen123" but will return no matches for "jennifer". - - Dictionary words containing a large number of characters that are - not letters or digits may result in unexpected findings because such - characters are treated as whitespace. The - `limits `__ page contains - details about the size limits of dictionaries. For dictionaries that - do not fit within these constraints, consider using - ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): - List of words or phrases to search for. - - This field is a member of `oneof`_ ``source``. - cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): - Newline-delimited file of words in Cloud - Storage. Only a single file is accepted. - - This field is a member of `oneof`_ ``source``. - """ - - class WordList(proto.Message): - r"""Message defining a list of words or phrases to search for in - the data. - - Attributes: - words (MutableSequence[str]): - Words or phrases defining the dictionary. The dictionary - must contain at least one phrase and every phrase must - contain at least 2 characters that are letters or digits. 
- [required] - """ - - words: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - word_list: 'CustomInfoType.Dictionary.WordList' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='CustomInfoType.Dictionary.WordList', - ) - cloud_storage_path: 'CloudStoragePath' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='CloudStoragePath', - ) - - class Regex(proto.Message): - r"""Message defining a custom regular expression. - - Attributes: - pattern (str): - Pattern defining the regular expression. Its - syntax - (https://github.com/google/re2/wiki/Syntax) can - be found under the google/re2 repository on - GitHub. - group_indexes (MutableSequence[int]): - The index of the submatch to extract as - findings. When not specified, the entire match - is returned. No more than 3 may be included. - """ - - pattern: str = proto.Field( - proto.STRING, - number=1, - ) - group_indexes: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=2, - ) - - class SurrogateType(proto.Message): - r"""Message for detecting output from deidentification transformations - such as - ```CryptoReplaceFfxFpeConfig`` `__. - These types of transformations are those that perform - pseudonymization, thereby producing a "surrogate" as output. This - should be used in conjunction with a field on the transformation - such as ``surrogate_info_type``. This CustomInfoType does not - support the use of ``detection_rules``. - - """ - - class DetectionRule(proto.Message): - r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a - ``CustomInfoType`` to alter behavior under certain circumstances, - depending on the specific details of the rule. Not supported for the - ``surrogate_type`` custom infoType. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - - This field is a member of `oneof`_ ``type``. - """ - - class Proximity(proto.Message): - r"""Message for specifying a window around a finding to apply a - detection rule. - - Attributes: - window_before (int): - Number of characters before the finding to consider. For - tabular data, if you want to modify the likelihood of an - entire column of findngs, set this to 1. For more - information, see [Hotword example: Set the match likelihood - of a table column] - (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). - window_after (int): - Number of characters after the finding to - consider. - """ - - window_before: int = proto.Field( - proto.INT32, - number=1, - ) - window_after: int = proto.Field( - proto.INT32, - number=2, - ) - - class LikelihoodAdjustment(proto.Message): - r"""Message for specifying an adjustment to the likelihood of a - finding as part of a detection rule. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - fixed_likelihood (google.cloud.dlp_v2.types.Likelihood): - Set the likelihood of a finding to a fixed - value. - - This field is a member of `oneof`_ ``adjustment``. - relative_likelihood (int): - Increase or decrease the likelihood by the specified number - of levels. For example, if a finding would be ``POSSIBLE`` - without the detection rule and ``relative_likelihood`` is 1, - then it is upgraded to ``LIKELY``, while a value of -1 would - downgrade it to ``UNLIKELY``. 
Likelihood may never drop - below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so - applying an adjustment of 1 followed by an adjustment of -1 - when base likelihood is ``VERY_LIKELY`` will result in a - final likelihood of ``LIKELY``. - - This field is a member of `oneof`_ ``adjustment``. - """ - - fixed_likelihood: 'Likelihood' = proto.Field( - proto.ENUM, - number=1, - oneof='adjustment', - enum='Likelihood', - ) - relative_likelihood: int = proto.Field( - proto.INT32, - number=2, - oneof='adjustment', - ) - - class HotwordRule(proto.Message): - r"""The rule that adjusts the likelihood of findings within a - certain proximity of hotwords. - - Attributes: - hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression pattern defining what - qualifies as a hotword. - proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): - Range of characters within which the entire hotword must - reside. The total length of the window cannot exceed 1000 - characters. The finding itself will be included in the - window, so that hotwords can be used to match substrings of - the finding itself. Suppose you want Cloud DLP to promote - the likelihood of the phone number regex "(\d{3}) - \\d{3}-\d{4}" if the area code is known to be the area code - of a company's office. In this case, use the hotword regex - "(xxx)", where "xxx" is the area code in question. - - For tabular data, if you want to modify the likelihood of an - entire column of findngs, see [Hotword example: Set the - match likelihood of a table column] - (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values). - likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment): - Likelihood adjustment to apply to all - matching findings. 
- """ - - hotword_regex: 'CustomInfoType.Regex' = proto.Field( - proto.MESSAGE, - number=1, - message='CustomInfoType.Regex', - ) - proximity: 'CustomInfoType.DetectionRule.Proximity' = proto.Field( - proto.MESSAGE, - number=2, - message='CustomInfoType.DetectionRule.Proximity', - ) - likelihood_adjustment: 'CustomInfoType.DetectionRule.LikelihoodAdjustment' = proto.Field( - proto.MESSAGE, - number=3, - message='CustomInfoType.DetectionRule.LikelihoodAdjustment', - ) - - hotword_rule: 'CustomInfoType.DetectionRule.HotwordRule' = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='CustomInfoType.DetectionRule.HotwordRule', - ) - - info_type: 'InfoType' = proto.Field( - proto.MESSAGE, - number=1, - message='InfoType', - ) - likelihood: 'Likelihood' = proto.Field( - proto.ENUM, - number=6, - enum='Likelihood', - ) - dictionary: Dictionary = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=Dictionary, - ) - regex: Regex = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=Regex, - ) - surrogate_type: SurrogateType = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=SurrogateType, - ) - stored_type: 'StoredType' = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message='StoredType', - ) - detection_rules: MutableSequence[DetectionRule] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=DetectionRule, - ) - exclusion_type: ExclusionType = proto.Field( - proto.ENUM, - number=8, - enum=ExclusionType, - ) - - -class FieldId(proto.Message): - r"""General identifier of a data field in a storage service. - - Attributes: - name (str): - Name describing the field. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class PartitionId(proto.Message): - r"""Datastore partition ID. - A partition ID identifies a grouping of entities. The grouping - is always by project and namespace, however the namespace ID may - be empty. 
- A partition ID contains several dimensions: - project ID and namespace ID. - - Attributes: - project_id (str): - The ID of the project to which the entities - belong. - namespace_id (str): - If not empty, the ID of the namespace to - which the entities belong. - """ - - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - namespace_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class KindExpression(proto.Message): - r"""A representation of a Datastore kind. - - Attributes: - name (str): - The name of the kind. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DatastoreOptions(proto.Message): - r"""Options defining a data set within Google Cloud Datastore. - - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - A partition ID identifies a grouping of - entities. The grouping is always by project and - namespace, however the namespace ID may be - empty. - kind (google.cloud.dlp_v2.types.KindExpression): - The kind to process. - """ - - partition_id: 'PartitionId' = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - kind: 'KindExpression' = proto.Field( - proto.MESSAGE, - number=2, - message='KindExpression', - ) - - -class CloudStorageRegexFileSet(proto.Message): - r"""Message representing a set of files in a Cloud Storage bucket. - Regular expressions are used to allow fine-grained control over - which files in the bucket to include. - - Included files are those that match at least one item in - ``include_regex`` and do not match any items in ``exclude_regex``. - Note that a file that matches items from both lists will *not* be - included. For a match to occur, the entire file path (i.e., - everything in the url after the bucket name) must match the regular - expression. 
- - For example, given the input - ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: - - - ``gs://mybucket/directory1/myfile`` will be included - - ``gs://mybucket/directory1/directory2/myfile`` will be included - (``.*`` matches across ``/``) - - ``gs://mybucket/directory0/directory1/myfile`` will *not* be - included (the full path doesn't match any items in - ``include_regex``) - - ``gs://mybucket/directory1/excludedfile`` will *not* be included - (the path matches an item in ``exclude_regex``) - - If ``include_regex`` is left empty, it will match all files by - default (this is equivalent to setting ``include_regex: [".*"]``). - - Some other common use cases: - - - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will - include all files in ``mybucket`` except for .pdf files - - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` - will include all files directly under - ``gs://mybucket/directory/``, without matching across ``/`` - - Attributes: - bucket_name (str): - The name of a Cloud Storage bucket. Required. - include_regex (MutableSequence[str]): - A list of regular expressions matching file paths to - include. All files in the bucket that match at least one of - these regular expressions will be included in the set of - files, except for those that also match an item in - ``exclude_regex``. Leaving this field empty will match all - files by default (this is equivalent to including ``.*`` in - the list). - - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. - exclude_regex (MutableSequence[str]): - A list of regular expressions matching file paths to - exclude. All files in the bucket that match at least one of - these regular expressions will be excluded from the scan. - - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. 
- """ - - bucket_name: str = proto.Field( - proto.STRING, - number=1, - ) - include_regex: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - exclude_regex: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CloudStorageOptions(proto.Message): - r"""Options defining a file or a set of files within a Cloud - Storage bucket. - - Attributes: - file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): - The set of one or more files to scan. - bytes_limit_per_file (int): - Max number of bytes to scan from a file. If a scanned file's - size is bigger than this value then the rest of the bytes - are omitted. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. Cannot be set - if de-identification is requested. - bytes_limit_per_file_percent (int): - Max percentage of bytes to scan from a file. The rest are - omitted. The number of bytes scanned is rounded down. Must - be between 0 and 100, inclusively. Both 0 and 100 means no - limit. Defaults to 0. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. Cannot be set - if de-identification is requested. - file_types (MutableSequence[google.cloud.dlp_v2.types.FileType]): - List of file type groups to include in the scan. If empty, - all files are scanned and available data format processors - are applied. In addition, the binary content of the selected - files is always scanned as well. Images are scanned only as - binary if the specified region does not support image - inspection and no file_types were specified. Image - inspection is restricted to 'global', 'us', 'asia', and - 'europe'. - sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): - - files_limit_percent (int): - Limits the number of files to scan to this - percentage of the input FileSet. Number of files - scanned is rounded down. Must be between 0 and - 100, inclusively. Both 0 and 100 means no limit. 
- Defaults to 0. - """ - class SampleMethod(proto.Enum): - r"""How to sample bytes if not all bytes are scanned. Meaningful only - when used in conjunction with bytes_limit_per_file. If not - specified, scanning would start from the top. - - Values: - SAMPLE_METHOD_UNSPECIFIED (0): - No description available. - TOP (1): - Scan from the top (default). - RANDOM_START (2): - For each file larger than bytes_limit_per_file, randomly - pick the offset to start scanning. The scanned bytes are - contiguous. - """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - class FileSet(proto.Message): - r"""Set of files to scan. - - Attributes: - url (str): - The Cloud Storage url of the file(s) to scan, in the format - ``gs:///``. Trailing wildcard in the path is - allowed. - - If the url ends in a trailing slash, the bucket or directory - represented by the url will be scanned non-recursively - (content in sub-directories will not be scanned). This means - that ``gs://mybucket/`` is equivalent to - ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is - equivalent to ``gs://mybucket/directory/*``. - - Exactly one of ``url`` or ``regex_file_set`` must be set. - regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): - The regex-filtered set of files to scan. Exactly one of - ``url`` or ``regex_file_set`` must be set. 
- """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - regex_file_set: 'CloudStorageRegexFileSet' = proto.Field( - proto.MESSAGE, - number=2, - message='CloudStorageRegexFileSet', - ) - - file_set: FileSet = proto.Field( - proto.MESSAGE, - number=1, - message=FileSet, - ) - bytes_limit_per_file: int = proto.Field( - proto.INT64, - number=4, - ) - bytes_limit_per_file_percent: int = proto.Field( - proto.INT32, - number=8, - ) - file_types: MutableSequence['FileType'] = proto.RepeatedField( - proto.ENUM, - number=5, - enum='FileType', - ) - sample_method: SampleMethod = proto.Field( - proto.ENUM, - number=6, - enum=SampleMethod, - ) - files_limit_percent: int = proto.Field( - proto.INT32, - number=7, - ) - - -class CloudStorageFileSet(proto.Message): - r"""Message representing a set of files in Cloud Storage. - - Attributes: - url (str): - The url, in the format ``gs:///``. Trailing - wildcard in the path is allowed. - """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CloudStoragePath(proto.Message): - r"""Message representing a single file or path in Cloud Storage. - - Attributes: - path (str): - A url representing a file or path (no wildcards) in Cloud - Storage. Example: gs://[BUCKET_NAME]/dictionary.txt - """ - - path: str = proto.Field( - proto.STRING, - number=1, - ) - - -class BigQueryOptions(proto.Message): - r"""Options defining BigQuery table and row identifiers. - - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Table fields that may uniquely identify a row within the - table. When ``actions.saveFindings.outputConfig.table`` is - specified, the values of columns specified here are - available in the output table under - ``location.content_locations.record_location.record_key.id_values``. - Nested fields such as ``person.birthdate.year`` are allowed. 
- rows_limit (int): - Max number of rows to scan. If the table has more rows than - this value, the rest of the rows are omitted. If not set, or - if set to 0, all rows will be scanned. Only one of - rows_limit and rows_limit_percent can be specified. Cannot - be used in conjunction with TimespanConfig. - rows_limit_percent (int): - Max percentage of rows to scan. The rest are omitted. The - number of rows scanned is rounded down. Must be between 0 - and 100, inclusively. Both 0 and 100 means no limit. - Defaults to 0. Only one of rows_limit and rows_limit_percent - can be specified. Cannot be used in conjunction with - TimespanConfig. - sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): - - excluded_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - References to fields excluded from scanning. - This allows you to skip inspection of entire - columns which you know have no findings. - included_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Limit scanning only to these fields. - """ - class SampleMethod(proto.Enum): - r"""How to sample rows if not all rows are scanned. Meaningful only when - used in conjunction with either rows_limit or rows_limit_percent. If - not specified, rows are scanned in the order BigQuery reads them. - - Values: - SAMPLE_METHOD_UNSPECIFIED (0): - No description available. - TOP (1): - Scan groups of rows in the order BigQuery - provides (default). Multiple groups of rows may - be scanned in parallel, so results may not - appear in the same order the rows are read. - RANDOM_START (2): - Randomly pick groups of rows to scan. 
- """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - table_reference: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='FieldId', - ) - rows_limit: int = proto.Field( - proto.INT64, - number=3, - ) - rows_limit_percent: int = proto.Field( - proto.INT32, - number=6, - ) - sample_method: SampleMethod = proto.Field( - proto.ENUM, - number=4, - enum=SampleMethod, - ) - excluded_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldId', - ) - included_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='FieldId', - ) - - -class StorageConfig(proto.Message): - r"""Shared message indicating Cloud storage type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): - Google Cloud Datastore options. - - This field is a member of `oneof`_ ``type``. - cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): - Cloud Storage options. - - This field is a member of `oneof`_ ``type``. - big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): - BigQuery options. - - This field is a member of `oneof`_ ``type``. - hybrid_options (google.cloud.dlp_v2.types.HybridOptions): - Hybrid inspection options. - - This field is a member of `oneof`_ ``type``. - timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): - - """ - - class TimespanConfig(proto.Message): - r"""Configuration of the timespan of the items to include in - scanning. 
Currently only supported when inspecting Cloud Storage - and BigQuery. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows older than - this value. If not set, no lower time limit is - applied. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows newer than - this value. If not set, no upper time limit is - applied. - timestamp_field (google.cloud.dlp_v2.types.FieldId): - Specification of the field containing the timestamp of - scanned items. Used for data sources like Datastore and - BigQuery. - - For BigQuery - - If this value is not specified and the table was modified - between the given start and end times, the entire table will - be scanned. If this value is specified, then rows are - filtered based on the given start and end times. Rows with a - ``NULL`` value in the provided BigQuery column are skipped. - Valid data types of the provided BigQuery column are: - ``INTEGER``, ``DATE``, ``TIMESTAMP``, and ``DATETIME``. - - If your BigQuery table is `partitioned at ingestion - time `__, - you can use any of the following pseudo-columns as your - timestamp field. When used with Cloud DLP, these - pseudo-column names are case sensitive. - - .. raw:: html - -
    -
  • _PARTITIONTIME
  • -
  • _PARTITIONDATE
  • -
  • _PARTITION_LOAD_TIME
  • -
- - For Datastore - - If this value is specified, then entities are filtered based - on the given start and end times. If an entity does not - contain the provided timestamp property or contains empty or - invalid values, then it is included. Valid data types of the - provided timestamp property are: ``TIMESTAMP``. - - See the `known - issue `__ - related to this operation. - enable_auto_population_of_timespan_config (bool): - When the job is started by a JobTrigger we will - automatically figure out a valid start_time to avoid - scanning files that have not been modified since the last - time the JobTrigger executed. This will be based on the time - of the execution of the last run of the JobTrigger or the - timespan end_time used in the last run of the JobTrigger. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - timestamp_field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=3, - message='FieldId', - ) - enable_auto_population_of_timespan_config: bool = proto.Field( - proto.BOOL, - number=4, - ) - - datastore_options: 'DatastoreOptions' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreOptions', - ) - cloud_storage_options: 'CloudStorageOptions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='CloudStorageOptions', - ) - big_query_options: 'BigQueryOptions' = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message='BigQueryOptions', - ) - hybrid_options: 'HybridOptions' = proto.Field( - proto.MESSAGE, - number=9, - oneof='type', - message='HybridOptions', - ) - timespan_config: TimespanConfig = proto.Field( - proto.MESSAGE, - number=6, - message=TimespanConfig, - ) - - -class HybridOptions(proto.Message): - r"""Configuration to control jobs where the content being - inspected is outside of Google 
Cloud Platform. - - Attributes: - description (str): - A short description of where the data is - coming from. Will be stored once in the job. 256 - max length. - required_finding_label_keys (MutableSequence[str]): - These are labels that each inspection request must include - within their 'finding_labels' map. Request may contain - others, but any missing one of these will be rejected. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - No more than 10 keys can be required. - labels (MutableMapping[str, str]): - To organize findings, these labels will be added to each - finding. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional - information to make findings meaningful such as - the columns that are primary keys. - """ - - description: str = proto.Field( - proto.STRING, - number=1, - ) - required_finding_label_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - table_options: 'TableOptions' = proto.Field( - proto.MESSAGE, - number=4, - message='TableOptions', - ) - - -class BigQueryKey(proto.Message): - r"""Row key for identifying a record in BigQuery table. - - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - row_number (int): - Row number inferred at the time the table was scanned. 
This - value is nondeterministic, cannot be queried, and may be - null for inspection jobs. To locate findings within a table, - specify - ``inspect_job.storage_config.big_query_options.identifying_fields`` - in ``CreateDlpJobRequest``. - """ - - table_reference: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - row_number: int = proto.Field( - proto.INT64, - number=2, - ) - - -class DatastoreKey(proto.Message): - r"""Record key for a finding in Cloud Datastore. - - Attributes: - entity_key (google.cloud.dlp_v2.types.Key): - Datastore entity key. - """ - - entity_key: 'Key' = proto.Field( - proto.MESSAGE, - number=1, - message='Key', - ) - - -class Key(proto.Message): - r"""A unique identifier for a Datastore entity. - If a key's partition ID or any of its path kinds or names are - reserved/read-only, the key is reserved/read-only. - A reserved/read-only key is forbidden in certain documented - contexts. - - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - Entities are partitioned into subsets, - currently identified by a project ID and - namespace ID. Queries are scoped to a single - partition. - path (MutableSequence[google.cloud.dlp_v2.types.Key.PathElement]): - The entity path. An entity path consists of one or more - elements composed of a kind and a string or numerical - identifier, which identify entities. The first element - identifies a *root entity*, the second element identifies a - *child* of the root entity, the third element identifies a - child of the second entity, and so forth. The entities - identified by all prefixes of the path are called the - element's *ancestors*. - - A path can never be empty, and a path can have at most 100 - elements. - """ - - class PathElement(proto.Message): - r"""A (kind, ID/name) pair used to construct a key path. - If either name or ID is set, the element is complete. If neither - is set, the element is incomplete. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - kind (str): - The kind of the entity. A kind matching regex ``__.*__`` is - reserved/read-only. A kind must not contain more than 1500 - bytes when UTF-8 encoded. Cannot be ``""``. - id (int): - The auto-allocated ID of the entity. - Never equal to zero. Values less than zero are - discouraged and may not be supported in the - future. - - This field is a member of `oneof`_ ``id_type``. - name (str): - The name of the entity. A name matching regex ``__.*__`` is - reserved/read-only. A name must not be more than 1500 bytes - when UTF-8 encoded. Cannot be ``""``. - - This field is a member of `oneof`_ ``id_type``. - """ - - kind: str = proto.Field( - proto.STRING, - number=1, - ) - id: int = proto.Field( - proto.INT64, - number=2, - oneof='id_type', - ) - name: str = proto.Field( - proto.STRING, - number=3, - oneof='id_type', - ) - - partition_id: 'PartitionId' = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - path: MutableSequence[PathElement] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=PathElement, - ) - - -class RecordKey(proto.Message): - r"""Message for a unique key indicating a record that contains a - finding. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - datastore_key (google.cloud.dlp_v2.types.DatastoreKey): - - This field is a member of `oneof`_ ``type``. 
- big_query_key (google.cloud.dlp_v2.types.BigQueryKey): - - This field is a member of `oneof`_ ``type``. - id_values (MutableSequence[str]): - Values of identifying columns in the given row. Order of - values matches the order of ``identifying_fields`` specified - in the scanning request. - """ - - datastore_key: 'DatastoreKey' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreKey', - ) - big_query_key: 'BigQueryKey' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='BigQueryKey', - ) - id_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class BigQueryTable(proto.Message): - r"""Message defining the location of a BigQuery table. A table is - uniquely identified by its project_id, dataset_id, and table_name. - Within a query a table is often referenced with a string in the - format of: ``:.`` or - ``..``. - - Attributes: - project_id (str): - The Google Cloud Platform project ID of the - project containing the table. If omitted, - project ID is inferred from the API call. - dataset_id (str): - Dataset ID of the table. - table_id (str): - Name of the table. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=2, - ) - table_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BigQueryField(proto.Message): - r"""Message defining a field of a BigQuery table. - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Source table of the field. - field (google.cloud.dlp_v2.types.FieldId): - Designated field in the BigQuery table. - """ - - table: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=2, - message='FieldId', - ) - - -class EntityId(proto.Message): - r"""An entity in a dataset is a field or set of fields that correspond - to a single person. 
For example, in medical records the ``EntityId`` - might be a patient identifier, or for financial records it might be - an account identifier. This message is used when generalizations or - analysis must take into account that multiple rows correspond to the - same entity. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Composite key indicating which field contains - the entity identifier. - """ - - field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -class TableOptions(proto.Message): - r"""Instructions regarding the table content being inspected. - - Attributes: - identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - The columns that are the primary keys for - table objects included in ContentItem. A copy of - this cell's value will stored alongside - alongside each finding so that the finding can - be traced to the specific row it came from. No - more than 3 may be provided. - """ - - identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py deleted file mode 100644 index 6b1462df..00000000 --- a/owl-bot-staging/v2/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/dlp_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py deleted file mode 100644 index e4371abf..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ActivateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ActivateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.activate_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ActivateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py deleted file mode 100644 index c0b4fac1..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ActivateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ActivateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.activate_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ActivateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py deleted file mode 100644 index d8190299..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CancelDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_CancelDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py deleted file mode 100644 index 7475d6fa..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CancelDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_CancelDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py deleted file mode 100644 index 81ad2519..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for CreateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py deleted file mode 100644 index b394f634..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py deleted file mode 100644 index 28770717..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py deleted file mode 100644 index 779754f6..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py deleted file mode 100644 index aeb40676..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py deleted file mode 100644 index 0e344b36..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py deleted file mode 100644 index 3e82b8f3..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = await client.create_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py deleted file mode 100644 index ebb74284..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file 
except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = client.create_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py deleted file mode 100644 index cae6db89..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py deleted file mode 100644 index d59a301d..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py deleted file mode 100644 index 4903b032..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeidentifyContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = await client.deidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_DeidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py deleted file mode 100644 index 2422616c..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeidentifyContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = client.deidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_DeidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py deleted file mode 100644 index f544f12d..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_deidentify_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py deleted file mode 100644 index a33f3b26..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_deidentify_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py deleted file mode 100644 index 8737125b..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py deleted file mode 100644 index bb0ce9df..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for DeleteDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - client.delete_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py deleted file mode 100644 index f0aec8eb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_inspect_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py deleted file mode 100644 index c908d867..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_inspect_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py deleted file mode 100644 index 3784ee3e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_trigger(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py deleted file mode 100644 index 9f4405da..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - client.delete_job_trigger(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py deleted file mode 100644 index 652d88ff..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - await client.delete_stored_info_type(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py deleted file mode 100644 index 7e37ce36..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - client.delete_stored_info_type(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py deleted file mode 100644 index 869504da..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinishDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_FinishDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.finish_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_FinishDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py deleted file mode 100644 index 1b694f90..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinishDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_FinishDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - client.finish_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_FinishDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py deleted file mode 100644 index fc1570d3..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for GetDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py deleted file mode 100644 index bb1e1986..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py deleted file mode 100644 index 2065aa85..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py deleted file mode 100644 index 13959bde..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. 
DO NOT EDIT! -# -# Snippet for GetDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py deleted file mode 100644 index 1a9c9649..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py deleted file mode 100644 index 112e3d83..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py deleted file mode 100644 index 248184c7..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py deleted file mode 100644 index 9c6cdb3a..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py deleted file mode 100644 index a7820fe2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py deleted file mode 100644 index d0b0a44c..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py deleted file mode 100644 index e9f9be5a..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py deleted file mode 100644 index 2bfd7fe1..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py deleted file mode 100644 index dbdd91c2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py deleted file mode 100644 index a9c4c85e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the 
License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py deleted file mode 100644 index 3f24588b..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for InspectContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_InspectContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = await client.inspect_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_InspectContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py deleted file mode 100644 index 4b5a10f3..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for InspectContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_InspectContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = client.inspect_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_InspectContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py deleted file mode 100644 index d1a40dc0..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeidentifyTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py deleted file mode 100644 index 6a01f0fb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeidentifyTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py deleted file mode 100644 index 57c790d8..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDlpJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDlpJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDlpJobs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py deleted file mode 100644 index 7d06c237..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDlpJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDlpJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDlpJobs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py deleted file mode 100644 index 16b871f8..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInfoTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = await client.list_info_types(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ListInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py deleted file mode 100644 index 9e3ca167..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInfoTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = client.list_info_types(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ListInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py deleted file mode 100644 index 6e405a4f..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInspectTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInspectTemplates_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListInspectTemplates_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py deleted file mode 100644 index 71673677..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInspectTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInspectTemplates_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListInspectTemplates_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py deleted file mode 100644 index e8c0281f..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListJobTriggers_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListJobTriggers_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py deleted file mode 100644 index 0f9141c0..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListJobTriggers_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListJobTriggers_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py deleted file mode 100644 index 460c99c4..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListStoredInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py deleted file mode 100644 index 1ad1796e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListStoredInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py deleted file mode 100644 index a7a0d502..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RedactImage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_RedactImage_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = await client.redact_image(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_RedactImage_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py deleted file mode 100644 index 272bdb80..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RedactImage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_RedactImage_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = client.redact_image(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_RedactImage_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py deleted file mode 100644 index 401f62df..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ReidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ReidentifyContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.reidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ReidentifyContent_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py deleted file mode 100644 index 9e654be9..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ReidentifyContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = client.reidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ReidentifyContent_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py deleted file mode 100644 index 8b32186c..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py deleted file mode 100644 index e3296531..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py deleted file mode 100644 index 8e062116..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py deleted file mode 100644 index 332c5de6..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py deleted file mode 100644 index 58baaeeb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.update_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateJobTrigger_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py deleted file mode 100644 index 3694b5ff..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.update_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateJobTrigger_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py deleted file mode 100644 index d5658d32..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.update_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py b/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py deleted file mode 100644 index 9471180b..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.update_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json deleted file mode 100644 index 956f9eab..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ /dev/null @@ -1,5503 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.privacy.dlp.v2", - "version": "v2" - } - ], - "language": "PYTHON", - "name": "google-cloud-dlp", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.activate_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ActivateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "activate_job_trigger" - }, - "description": "Sample for ActivateJobTrigger", - "file": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.activate_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ActivateJobTrigger" - }, - "parameters": [ - { 
- "name": "request", - "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "activate_job_trigger" - }, - "description": "Sample for ActivateJobTrigger", - "file": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.cancel_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CancelDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_dlp_job" - }, - "description": "Sample for CancelDlpJob", - "file": 
"dlp_v2_generated_dlp_service_cancel_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.cancel_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CancelDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_dlp_job" - }, - "description": "Sample for CancelDlpJob", - "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "create_deidentify_template" - }, - "description": "Sample for CreateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - 
"shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "create_deidentify_template" - }, - "description": "Sample for CreateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", - 
"service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_job", - "type": "google.cloud.dlp_v2.types.InspectJobConfig" - }, - { - "name": "risk_job", - "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "create_dlp_job" - }, - "description": "Sample for CreateDlpJob", - "file": "dlp_v2_generated_dlp_service_create_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" - }, - { - "name": 
"parent", - "type": "str" - }, - { - "name": "inspect_job", - "type": "google.cloud.dlp_v2.types.InspectJobConfig" - }, - { - "name": "risk_job", - "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "create_dlp_job" - }, - "description": "Sample for CreateDlpJob", - "file": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - 
}, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "create_inspect_template" - }, - "description": "Sample for CreateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_create_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "create_inspect_template" - }, - "description": 
"Sample for CreateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "create_job_trigger" - }, - "description": "Sample for CreateJobTrigger", - "file": "dlp_v2_generated_dlp_service_create_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_async", - "segments": [ - { - "end": 55, - 
"start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "create_job_trigger" - }, - "description": "Sample for CreateJobTrigger", - "file": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 
56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "create_stored_info_type" - }, - "description": "Sample for CreateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "create_stored_info_type" - }, - "description": "Sample for CreateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.deidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", 
- "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", - "shortName": "deidentify_content" - }, - "description": "Sample for DeidentifyContent", - "file": "dlp_v2_generated_dlp_service_deidentify_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_deidentify_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.deidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", - "shortName": "deidentify_content" - }, - "description": "Sample for DeidentifyContent", - "file": "dlp_v2_generated_dlp_service_deidentify_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_deidentify_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deidentify_template" - }, - "description": "Sample for DeleteDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deidentify_template" - }, - "description": "Sample for DeleteDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dlp_job" - }, - "description": "Sample for DeleteDlpJob", - "file": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - 
"shortName": "DlpService" - }, - "shortName": "DeleteDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_dlp_job" - }, - "description": "Sample for DeleteDlpJob", - "file": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_inspect_template" - }, - 
"description": "Sample for DeleteInspectTemplate", - "file": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_inspect_template" - }, - "description": "Sample for DeleteInspectTemplate", - "file": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job_trigger" - }, - "description": "Sample for DeleteJobTrigger", - "file": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - 
"fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job_trigger" - }, - "description": "Sample for DeleteJobTrigger", - "file": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" - }, - { - "name": "name", - "type": 
"str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_stored_info_type" - }, - "description": "Sample for DeleteStoredInfoType", - "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_stored_info_type" - }, - "description": "Sample for DeleteStoredInfoType", - "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dlp_v2_generated_DlpService_DeleteStoredInfoType_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.finish_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "FinishDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "finish_dlp_job" - }, - "description": "Sample for FinishDlpJob", - "file": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.finish_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "FinishDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "finish_dlp_job" - }, - "description": "Sample for FinishDlpJob", - "file": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "get_deidentify_template" - }, - "description": "Sample for GetDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "get_deidentify_template" - }, - "description": "Sample for GetDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "get_dlp_job" - }, - "description": "Sample for GetDlpJob", - "file": "dlp_v2_generated_dlp_service_get_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - 
{ - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "get_dlp_job" - }, - "description": "Sample for GetDlpJob", - "file": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - 
"client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "get_inspect_template" - }, - "description": "Sample for GetInspectTemplate", - "file": "dlp_v2_generated_dlp_service_get_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - 
}, - "shortName": "GetInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "get_inspect_template" - }, - "description": "Sample for GetInspectTemplate", - "file": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "get_job_trigger" - }, - "description": "Sample for GetJobTrigger", - "file": "dlp_v2_generated_dlp_service_get_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "get_job_trigger" - }, - "description": "Sample for GetJobTrigger", - "file": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": 
"FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "get_stored_info_type" - }, - "description": "Sample for GetStoredInfoType", - "file": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "get_stored_info_type" - }, - "description": "Sample for GetStoredInfoType", - "file": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_dlp_job", - "method": { - "fullName": 
"google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_dlp_job" - }, - "description": "Sample for HybridInspectDlpJob", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - 
{ - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_dlp_job" - }, - "description": "Sample for HybridInspectDlpJob", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_job_trigger" - }, - 
"description": "Sample for HybridInspectJobTrigger", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_job_trigger" - }, - "description": "Sample for HybridInspectJobTrigger", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": 
"FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.inspect_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "InspectContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.InspectContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", - "shortName": "inspect_content" - }, - "description": "Sample for InspectContent", - "file": "dlp_v2_generated_dlp_service_inspect_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_InspectContent_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dlp_v2_generated_dlp_service_inspect_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.inspect_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "InspectContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.InspectContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", - "shortName": "inspect_content" - }, - "description": "Sample for InspectContent", - "file": "dlp_v2_generated_dlp_service_inspect_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_InspectContent_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_inspect_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_deidentify_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", - "service": { - "fullName": 
"google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDeidentifyTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager", - "shortName": "list_deidentify_templates" - }, - "description": "Sample for ListDeidentifyTemplates", - "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_deidentify_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDeidentifyTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager", - "shortName": "list_deidentify_templates" - }, - "description": "Sample for ListDeidentifyTemplates", - "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_dlp_jobs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDlpJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager", - "shortName": "list_dlp_jobs" - }, - 
"description": "Sample for ListDlpJobs", - "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_dlp_jobs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDlpJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager", - "shortName": "list_dlp_jobs" - }, - "description": "Sample for ListDlpJobs", - "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", - "shortName": "list_info_types" - }, - "description": "Sample for ListInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_info_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_info_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - 
"fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", - "shortName": "list_info_types" - }, - "description": "Sample for ListInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_info_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_info_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_inspect_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": 
"ListInspectTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager", - "shortName": "list_inspect_templates" - }, - "description": "Sample for ListInspectTemplates", - "file": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_inspect_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInspectTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager", - "shortName": "list_inspect_templates" - }, - "description": "Sample for ListInspectTemplates", - "file": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_job_triggers", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListJobTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager", - "shortName": "list_job_triggers" - }, - "description": "Sample for ListJobTriggers", - "file": "dlp_v2_generated_dlp_service_list_job_triggers_async.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_job_triggers_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_job_triggers", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListJobTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager", - "shortName": "list_job_triggers" - }, - "description": "Sample for ListJobTriggers", - "file": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_stored_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListStoredInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager", - "shortName": "list_stored_info_types" - }, - "description": "Sample for ListStoredInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_stored_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListStoredInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager", - "shortName": "list_stored_info_types" - }, - "description": "Sample for ListStoredInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.redact_image", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", - "service": { - "fullName": 
"google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "RedactImage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.RedactImageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", - "shortName": "redact_image" - }, - "description": "Sample for RedactImage", - "file": "dlp_v2_generated_dlp_service_redact_image_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_RedactImage_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_redact_image_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.redact_image", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "RedactImage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.RedactImageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", - "shortName": 
"redact_image" - }, - "description": "Sample for RedactImage", - "file": "dlp_v2_generated_dlp_service_redact_image_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_RedactImage_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_redact_image_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.reidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ReidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", - "shortName": "reidentify_content" - }, - "description": "Sample for ReidentifyContent", - "file": "dlp_v2_generated_dlp_service_reidentify_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_reidentify_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.reidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ReidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", - "shortName": "reidentify_content" - }, - "description": "Sample for ReidentifyContent", - "file": "dlp_v2_generated_dlp_service_reidentify_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_reidentify_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "update_deidentify_template" - }, - "description": "Sample for UpdateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.update_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "update_deidentify_template" - }, - "description": "Sample for UpdateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_inspect_template", - "method": { - "fullName": 
"google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "update_inspect_template" - }, - "description": "Sample for UpdateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_update_inspect_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - 
"shortName": "UpdateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "update_inspect_template" - }, - "description": "Sample for UpdateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" - }, 
- { - "name": "name", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "update_job_trigger" - }, - "description": "Sample for UpdateJobTrigger", - "file": "dlp_v2_generated_dlp_service_update_job_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "update_job_trigger" - }, - "description": "Sample for UpdateJobTrigger", - "file": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "update_stored_info_type" - }, - "description": "Sample for UpdateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "update_stored_info_type" - }, - "description": "Sample for UpdateStoredInfoType", - "file": 
"dlp_v2_generated_dlp_service_update_stored_info_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py" - } - ] -} diff --git a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py deleted file mode 100644 index 9adcd0d5..00000000 --- a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py +++ /dev/null @@ -1,209 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class dlpCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'activate_job_trigger': ('name', ), - 'cancel_dlp_job': ('name', ), - 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), - 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), - 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), - 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), - 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), - 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), - 'delete_deidentify_template': ('name', ), - 'delete_dlp_job': ('name', ), - 'delete_inspect_template': ('name', ), - 'delete_job_trigger': ('name', ), - 'delete_stored_info_type': ('name', ), - 'finish_dlp_job': ('name', ), - 'get_deidentify_template': ('name', ), - 'get_dlp_job': ('name', ), - 'get_inspect_template': ('name', ), - 'get_job_trigger': ('name', ), - 'get_stored_info_type': ('name', ), - 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), - 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), - 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), - 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 
'location_id', ), - 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), - 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), - 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'type_', 'location_id', ), - 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), - 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), - 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), - 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), - 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), - 'update_stored_info_type': ('name', 'config', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=dlpCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the dlp client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py deleted file mode 100644 index 2b4eb21b..00000000 --- a/owl-bot-staging/v2/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-dlp' - - -description = "Google Cloud Dlp API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/dlp/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-dlp" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: 
Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v2/testing/constraints-3.10.txt b/owl-bot-staging/v2/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.11.txt b/owl-bot-staging/v2/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.12.txt b/owl-bot-staging/v2/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adfe..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py deleted file mode 100644 index 64618efd..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ /dev/null @@ -1,17404 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient -from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.services.dlp_service import transports -from google.cloud.dlp_v2.types import dlp -from google.cloud.dlp_v2.types import storage -from google.cloud.location import locations_pb2 -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import 
duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DlpServiceClient._get_default_mtls_endpoint(None) is None - assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DlpServiceClient, "grpc"), - (DlpServiceAsyncClient, "grpc_asyncio"), - (DlpServiceClient, "rest"), -]) -def test_dlp_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with 
mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dlp.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DlpServiceGrpcTransport, "grpc"), - (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DlpServiceRestTransport, "rest"), -]) -def test_dlp_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DlpServiceClient, "grpc"), - (DlpServiceAsyncClient, "grpc_asyncio"), - (DlpServiceClient, "rest"), -]) -def test_dlp_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dlp.googleapis.com' - ) - - -def test_dlp_service_client_get_transport_class(): - transport = DlpServiceClient.get_transport_class() - available_transports = [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceRestTransport, - ] - assert transport in available_transports - - transport = DlpServiceClient.get_transport_class("grpc") - assert transport == transports.DlpServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -def test_dlp_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), - 
(DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DlpServiceClient, DlpServiceAsyncClient -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -def test_dlp_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), -]) -def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), -]) -def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_dlp_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DlpServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_dlp_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dlp.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dlp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.InspectContentRequest, - dict, -]) -def test_inspect_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectContentResponse( - ) - response = client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -def test_inspect_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - client.inspect_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - -@pytest.mark.asyncio -async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( - )) - response = await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -@pytest.mark.asyncio -async def test_inspect_content_async_from_dict(): - await test_inspect_content_async(request_type=dict) - - -def test_inspect_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.InspectContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = dlp.InspectContentResponse() - client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_inspect_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.InspectContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) - await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.RedactImageRequest, - dict, -]) -def test_redact_image(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - ) - response = client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -def test_redact_image_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - client.redact_image() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - -@pytest.mark.asyncio -async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - )) - response = await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -@pytest.mark.asyncio -async def test_redact_image_async_from_dict(): - await test_redact_image_async(request_type=dict) - - -def test_redact_image_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = dlp.RedactImageResponse() - client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_redact_image_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) - await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.DeidentifyContentRequest, - dict, -]) -def test_deidentify_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyContentResponse( - ) - response = client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -def test_deidentify_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - client.deidentify_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - -@pytest.mark.asyncio -async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( - )) - response = await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_deidentify_content_async_from_dict(): - await test_deidentify_content_async(request_type=dict) - - -def test_deidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = dlp.DeidentifyContentResponse() - client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_deidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) - await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.ReidentifyContentRequest, - dict, -]) -def test_reidentify_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ReidentifyContentResponse( - ) - response = client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -def test_reidentify_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - client.reidentify_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - -@pytest.mark.asyncio -async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( - )) - response = await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_reidentify_content_async_from_dict(): - await test_reidentify_content_async(request_type=dict) - - -def test_reidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ReidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = dlp.ReidentifyContentResponse() - client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_reidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ReidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) - await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInfoTypesRequest, - dict, -]) -def test_list_info_types(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse( - ) - response = client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -def test_list_info_types_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - client.list_info_types() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - -@pytest.mark.asyncio -async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( - )) - response = await client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -@pytest.mark.asyncio -async def test_list_info_types_async_from_dict(): - await test_list_info_types_async(request_type=dict) - - -def test_list_info_types_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_info_types_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_info_types_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_info_types_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateInspectTemplateRequest, - dict, -]) -def test_create_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - client.create_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_inspect_template_async_from_dict(): - await test_create_inspect_template_async(request_type=dict) - - -def test_create_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - - -def test_create_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateInspectTemplateRequest, - dict, -]) -def test_update_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - client.update_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_inspect_template_async_from_dict(): - await test_update_inspect_template_async(request_type=dict) - - -def test_update_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetInspectTemplateRequest, - dict, -]) -def test_get_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - client.get_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_inspect_template_async_from_dict(): - await test_get_inspect_template_async(request_type=dict) - - -def test_get_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.GetInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInspectTemplatesRequest, - dict, -]) -def test_list_inspect_templates(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_inspect_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - client.list_inspect_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - -@pytest.mark.asyncio -async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_from_dict(): - await test_list_inspect_templates_async(request_type=dict) - - -def test_list_inspect_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = dlp.ListInspectTemplatesResponse() - client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_inspect_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_inspect_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_inspect_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.ListInspectTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_inspect_templates_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_inspect_templates(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in results) -def test_list_inspect_templates_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_inspect_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_inspect_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_inspect_templates(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteInspectTemplateRequest, - dict, -]) -def test_delete_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - client.delete_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - -@pytest.mark.asyncio -async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_inspect_template_async_from_dict(): - await test_delete_inspect_template_async(request_type=dict) - - -def test_delete_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = None - client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDeidentifyTemplateRequest, - dict, -]) -def test_create_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - client.create_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_deidentify_template_async_from_dict(): - await test_create_deidentify_template_async(request_type=dict) - - -def test_create_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - - -def test_create_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDeidentifyTemplateRequest, - dict, -]) -def test_update_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - client.update_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_deidentify_template_async_from_dict(): - await test_update_deidentify_template_async(request_type=dict) - - -def test_update_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDeidentifyTemplateRequest, - dict, -]) -def test_get_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - client.get_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_deidentify_template_async_from_dict(): - await test_get_deidentify_template_async(request_type=dict) - - -def test_get_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDeidentifyTemplatesRequest, - dict, -]) -def test_list_deidentify_templates(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_deidentify_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - client.list_deidentify_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_from_dict(): - await test_list_deidentify_templates_async(request_type=dict) - - -def test_list_deidentify_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = dlp.ListDeidentifyTemplatesResponse() - client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) - await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_deidentify_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_deidentify_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_deidentify_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_deidentify_templates_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_deidentify_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_deidentify_templates_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_deidentify_templates_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_deidentify_templates(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in results) -def test_list_deidentify_templates_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_deidentify_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_deidentify_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_deidentify_templates(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDeidentifyTemplateRequest, - dict, -]) -def test_delete_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - client.delete_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async_from_dict(): - await test_delete_deidentify_template_async(request_type=dict) - - -def test_delete_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = None - client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateJobTriggerRequest, - dict, -]) -def test_create_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_create_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - client.create_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - -@pytest.mark.asyncio -async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_create_job_trigger_async_from_dict(): - await test_create_job_trigger_async(request_type=dict) - - -def test_create_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - - -def test_create_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateJobTriggerRequest, - dict, -]) -def test_update_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_update_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - client.update_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - -@pytest.mark.asyncio -async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_update_job_trigger_async_from_dict(): - await test_update_job_trigger_async(request_type=dict) - - -def test_update_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectJobTriggerRequest, - dict, -]) -def test_hybrid_inspect_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse( - ) - response = client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - client.hybrid_inspect_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - response = await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async_from_dict(): - await test_hybrid_inspect_job_trigger_async(request_type=dict) - - -def test_hybrid_inspect_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_hybrid_inspect_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.hybrid_inspect_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_hybrid_inspect_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.HybridInspectResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.hybrid_inspect_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetJobTriggerRequest, - dict, -]) -def test_get_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_get_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - client.get_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - -@pytest.mark.asyncio -async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_get_job_trigger_async_from_dict(): - await test_get_job_trigger_async(request_type=dict) - - -def test_get_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListJobTriggersRequest, - dict, -]) -def test_list_job_triggers(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - ) - response = client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_triggers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - client.list_job_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - -@pytest.mark.asyncio -async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_job_triggers_async_from_dict(): - await test_list_job_triggers_async(request_type=dict) - - -def test_list_job_triggers_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListJobTriggersRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value = dlp.ListJobTriggersResponse() - client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_job_triggers_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListJobTriggersRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) - await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_job_triggers_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.ListJobTriggersResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_job_triggers( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_job_triggers_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_job_triggers_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_job_triggers( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_job_triggers_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - - -def test_list_job_triggers_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_job_triggers(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in results) -def test_list_job_triggers_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - pages = list(client.list_job_triggers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_job_triggers_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_job_triggers(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_job_triggers_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_job_triggers(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteJobTriggerRequest, - dict, -]) -def test_delete_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - client.delete_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - -@pytest.mark.asyncio -async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_trigger_async_from_dict(): - await test_delete_job_trigger_async(request_type=dict) - - -def test_delete_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = None - client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ActivateJobTriggerRequest, - dict, -]) -def test_activate_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_activate_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - client.activate_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - -@pytest.mark.asyncio -async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_activate_job_trigger_async_from_dict(): - await test_activate_job_trigger_async(request_type=dict) - - -def test_activate_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_activate_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDlpJobRequest, - dict, -]) -def test_create_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_create_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - client.create_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - -@pytest.mark.asyncio -async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_create_dlp_job_async_from_dict(): - await test_create_dlp_job_async(request_type=dict) - - -def test_create_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_dlp_job( - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) - - -def test_create_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - -@pytest.mark.asyncio -async def test_create_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_dlp_job( - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) - -@pytest.mark.asyncio -async def test_create_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDlpJobsRequest, - dict, -]) -def test_list_dlp_jobs(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_dlp_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - client.list_dlp_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDlpJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_from_dict(): - await test_list_dlp_jobs_async(request_type=dict) - - -def test_list_dlp_jobs_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDlpJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = dlp.ListDlpJobsResponse() - client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDlpJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) - await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_dlp_jobs_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_dlp_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_dlp_jobs_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_dlp_jobs_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_dlp_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_dlp_jobs_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - - -def test_list_dlp_jobs_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_dlp_jobs(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in results) -def test_list_dlp_jobs_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - pages = list(client.list_dlp_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_dlp_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_dlp_jobs(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.GetDlpJobRequest, - dict, -]) -def test_get_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_get_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - client.get_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - -@pytest.mark.asyncio -async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_get_dlp_job_async_from_dict(): - await test_get_dlp_job_async(request_type=dict) - - -def test_get_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDlpJobRequest, - dict, -]) -def test_delete_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - client.delete_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() - -@pytest.mark.asyncio -async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_dlp_job_async_from_dict(): - await test_delete_dlp_job_async(request_type=dict) - - -def test_delete_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value = None - client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CancelDlpJobRequest, - dict, -]) -def test_cancel_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - client.cancel_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() - -@pytest.mark.asyncio -async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_dlp_job_async_from_dict(): - await test_cancel_dlp_job_async(request_type=dict) - - -def test_cancel_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CancelDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value = None - client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_cancel_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CancelDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateStoredInfoTypeRequest, - dict, -]) -def test_create_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_create_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - client.create_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_stored_info_type_async_from_dict(): - await test_create_stored_info_type_async(request_type=dict) - - -def test_create_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateStoredInfoTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateStoredInfoTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_stored_info_type( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - - -def test_create_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - -@pytest.mark.asyncio -async def test_create_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_stored_info_type( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateStoredInfoTypeRequest, - dict, -]) -def test_update_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_update_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - client.update_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_update_stored_info_type_async_from_dict(): - await test_update_stored_info_type_async(request_type=dict) - - -def test_update_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_stored_info_type( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_stored_info_type( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetStoredInfoTypeRequest, - dict, -]) -def test_get_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_get_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - client.get_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_stored_info_type_async_from_dict(): - await test_get_stored_info_type_async(request_type=dict) - - -def test_get_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListStoredInfoTypesRequest, - dict, -]) -def test_list_stored_info_types(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_stored_info_types_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - client.list_stored_info_types() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() - -@pytest.mark.asyncio -async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_from_dict(): - await test_list_stored_info_types_async(request_type=dict) - - -def test_list_stored_info_types_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListStoredInfoTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value = dlp.ListStoredInfoTypesResponse() - client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_stored_info_types_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListStoredInfoTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) - await client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_stored_info_types_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_stored_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_stored_info_types_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_stored_info_types_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_stored_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_stored_info_types_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_stored_info_types_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_stored_info_types(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in results) -def test_list_stored_info_types_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - pages = list(client.list_stored_info_types(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_stored_info_types(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_stored_info_types(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteStoredInfoTypeRequest, - dict, -]) -def test_delete_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_stored_info_type_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - client.delete_stored_info_type() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() - -@pytest.mark.asyncio -async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_stored_info_type_async_from_dict(): - await test_delete_stored_info_type_async(request_type=dict) - - -def test_delete_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeleteStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value = None - client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectDlpJobRequest, - dict, -]) -def test_hybrid_inspect_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse( - ) - response = client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - client.hybrid_inspect_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - response = await client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async_from_dict(): - await test_hybrid_inspect_dlp_job_async(request_type=dict) - - -def test_hybrid_inspect_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.HybridInspectDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - await client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_hybrid_inspect_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.hybrid_inspect_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_hybrid_inspect_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.hybrid_inspect_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.FinishDlpJobRequest, - dict, -]) -def test_finish_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_finish_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - client.finish_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() - -@pytest.mark.asyncio -async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_finish_dlp_job_async_from_dict(): - await test_finish_dlp_job_async(request_type=dict) - - -def test_finish_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.FinishDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value = None - client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_finish_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.FinishDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.InspectContentRequest, - dict, -]) -def test_inspect_content_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.inspect_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_inspect_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_inspect_content") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectContentResponse.to_json(dlp.InspectContentResponse()) - - request = dlp.InspectContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectContentResponse() - - client.inspect_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
pre.assert_called_once() - post.assert_called_once() - - -def test_inspect_content_rest_bad_request(transport: str = 'rest', request_type=dlp.InspectContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.inspect_content(request) - - -def test_inspect_content_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.RedactImageRequest, - dict, -]) -def test_redact_image_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.RedactImageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.redact_image(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_redact_image_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_redact_image") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.RedactImageResponse.to_json(dlp.RedactImageResponse()) - - request = dlp.RedactImageRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value 
= request, metadata - post.return_value = dlp.RedactImageResponse() - - client.redact_image(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_redact_image_rest_bad_request(transport: str = 'rest', request_type=dlp.RedactImageRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.redact_image(request) - - -def test_redact_image_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeidentifyContentRequest, - dict, -]) -def test_deidentify_content_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.deidentify_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_deidentify_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_deidentify_content") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyContentResponse.to_json(dlp.DeidentifyContentResponse()) - - request = dlp.DeidentifyContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyContentResponse() - - client.deidentify_content(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_deidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.DeidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.deidentify_content(request) - - -def test_deidentify_content_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ReidentifyContentRequest, - dict, -]) -def test_reidentify_content_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ReidentifyContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.reidentify_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -def test_reidentify_content_rest_required_fields(request_type=dlp.ReidentifyContentRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = dlp.ReidentifyContentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.reidentify_content(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_reidentify_content_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.reidentify_content._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reidentify_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), 
"request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_reidentify_content") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ReidentifyContentResponse.to_json(dlp.ReidentifyContentResponse()) - - request = dlp.ReidentifyContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ReidentifyContentResponse() - - client.reidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_reidentify_content_rest_bad_request(transport: str = 'rest', request_type=dlp.ReidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.reidentify_content(request) - - -def test_reidentify_content_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInfoTypesRequest, - dict, -]) -def test_list_info_types_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInfoTypesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_info_types(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.ListInfoTypesResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_info_types_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_info_types") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListInfoTypesResponse.to_json(dlp.ListInfoTypesResponse()) - - request = dlp.ListInfoTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListInfoTypesResponse() - - client.list_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInfoTypesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_info_types(request) - - -def test_list_info_types_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInfoTypesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_info_types(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/infoTypes" % client.transport._host, args[1]) - - -def test_list_info_types_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_info_types_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateInspectTemplateRequest, - dict, -]) -def test_create_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_inspect_template_rest_required_fields(request_type=dlp.CreateInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "inspectTemplate", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_inspect_template") as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = dlp.CreateInspectTemplateRequest.pb(dlp.CreateInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - - request = dlp.CreateInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - - client.create_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_inspect_template(request) - - -def test_create_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) - - -def test_create_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -def test_create_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateInspectTemplateRequest, - dict, -]) -def test_update_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_inspect_template_rest_required_fields(request_type=dlp.UpdateInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_inspect_template") as pre: - pre.assert_not_called() - post.assert_not_called() - 
pb_message = dlp.UpdateInspectTemplateRequest.pb(dlp.UpdateInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - - request = dlp.UpdateInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - - client.update_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_inspect_template(request) - - -def test_update_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_update_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetInspectTemplateRequest, - dict, -]) -def test_get_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_inspect_template_rest_required_fields(request_type=dlp.GetInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_inspect_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - - request = dlp.GetInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - - client.get_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_inspect_template(request) - - -def test_get_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_get_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -def test_get_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInspectTemplatesRequest, - dict, -]) -def test_list_inspect_templates_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_inspect_templates(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_inspect_templates_rest_required_fields(request_type=dlp.ListInspectTemplatesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_inspect_templates(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_inspect_templates_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_inspect_templates._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_inspect_templates_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_inspect_templates") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListInspectTemplatesRequest.pb(dlp.ListInspectTemplatesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListInspectTemplatesResponse.to_json(dlp.ListInspectTemplatesResponse()) - - request = dlp.ListInspectTemplatesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListInspectTemplatesResponse() - - client.list_inspect_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_inspect_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_inspect_templates(request) - - -def test_list_inspect_templates_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_inspect_templates(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, args[1]) - - -def test_list_inspect_templates_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_inspect_templates_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_inspect_templates(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in results) - - pages = list(client.list_inspect_templates(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - 
- -@pytest.mark.parametrize("request_type", [ - dlp.DeleteInspectTemplateRequest, - dict, -]) -def test_delete_inspect_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_inspect_template_rest_required_fields(request_type=dlp.DeleteInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_inspect_template") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteInspectTemplateRequest.pb(dlp.DeleteInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - 
pre.return_value = request, metadata - - client.delete_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_inspect_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/inspectTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_inspect_template(request) - - -def test_delete_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/inspectTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_delete_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -def test_delete_inspect_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDeidentifyTemplateRequest, - dict, -]) -def test_create_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_deidentify_template_rest_required_fields(request_type=dlp.CreateDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default 
values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "deidentifyTemplate", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_create_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateDeidentifyTemplateRequest.pb(dlp.CreateDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - - request = dlp.CreateDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - - client.create_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_deidentify_template(request) - - -def test_create_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) - - -def test_create_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -def test_create_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDeidentifyTemplateRequest, - dict, -]) -def test_update_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_deidentify_template_rest_required_fields(request_type=dlp.UpdateDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), 
- interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.UpdateDeidentifyTemplateRequest.pb(dlp.UpdateDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - - request = dlp.UpdateDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - - client.update_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_deidentify_template(request) - - -def test_update_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_update_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDeidentifyTemplateRequest, - dict, -]) -def test_get_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_deidentify_template_rest_required_fields(request_type=dlp.GetDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else 
transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetDeidentifyTemplateRequest.pb(dlp.GetDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - - request = dlp.GetDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - - client.get_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_deidentify_template(request) - - -def test_get_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_get_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_get_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDeidentifyTemplatesRequest, - dict, -]) -def test_list_deidentify_templates_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_deidentify_templates(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_deidentify_templates_rest_required_fields(request_type=dlp.ListDeidentifyTemplatesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_deidentify_templates(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_deidentify_templates_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_deidentify_templates._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deidentify_templates_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as 
transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListDeidentifyTemplatesRequest.pb(dlp.ListDeidentifyTemplatesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListDeidentifyTemplatesResponse.to_json(dlp.ListDeidentifyTemplatesResponse()) - - request = dlp.ListDeidentifyTemplatesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListDeidentifyTemplatesResponse() - - client.list_deidentify_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_deidentify_templates_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_deidentify_templates(request) - - -def test_list_deidentify_templates_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_deidentify_templates(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) - - -def test_list_deidentify_templates_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_deidentify_templates_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_deidentify_templates(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in results) - - pages = list(client.list_deidentify_templates(request=sample_request).pages) - for page_, token in zip(pages, 
['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDeidentifyTemplateRequest, - dict, -]) -def test_delete_deidentify_template_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_deidentify_template(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_deidentify_template_rest_required_fields(request_type=dlp.DeleteDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteDeidentifyTemplateRequest.pb(dlp.DeleteDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", 
"squid"), - ] - pre.return_value = request, metadata - - client.delete_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_deidentify_template_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_deidentify_template(request) - - -def test_delete_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_delete_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_delete_deidentify_template_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateJobTriggerRequest, - dict, -]) -def test_create_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_job_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_create_job_trigger_rest_required_fields(request_type=dlp.CreateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "jobTrigger", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) - - request = dlp.CreateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - - client.create_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_job_trigger(request) - - -def test_create_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) - - -def test_create_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -def test_create_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateJobTriggerRequest, - dict, -]) -def test_update_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_update_job_trigger_rest_required_fields(request_type=dlp.UpdateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) - - request = dlp.UpdateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - - client.update_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_job_trigger(request) - - -def test_update_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_update_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectJobTriggerRequest, - dict, -]) -def test_hybrid_inspect_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.hybrid_inspect_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_job_trigger_rest_required_fields(request_type=dlp.HybridInspectJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.hybrid_inspect_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.HybridInspectJobTriggerRequest.pb(dlp.HybridInspectJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - 
"body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) - - request = dlp.HybridInspectJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.HybridInspectResponse() - - client.hybrid_inspect_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_hybrid_inspect_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.hybrid_inspect_job_trigger(request) - - -def test_hybrid_inspect_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.HybridInspectResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.hybrid_inspect_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" % client.transport._host, args[1]) - - -def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - - -def test_hybrid_inspect_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetJobTriggerRequest, - dict, -]) -def test_get_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) - - request = dlp.GetJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - - client.get_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.GetJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_job_trigger(request) - - -def test_get_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_get_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - - -def test_get_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListJobTriggersRequest, - dict, -]) -def test_list_job_triggers_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_job_triggers(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_triggers_rest_required_fields(request_type=dlp.ListJobTriggersRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListJobTriggersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_job_triggers(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_job_triggers_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_job_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_job_triggers_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_job_triggers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": 
pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListJobTriggersResponse.to_json(dlp.ListJobTriggersResponse()) - - request = dlp.ListJobTriggersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListJobTriggersResponse() - - client.list_job_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_job_triggers_rest_bad_request(transport: str = 'rest', request_type=dlp.ListJobTriggersRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_job_triggers(request) - - -def test_list_job_triggers_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListJobTriggersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_job_triggers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) - - -def test_list_job_triggers_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - - -def test_list_job_triggers_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_job_triggers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in results) - - pages = list(client.list_job_triggers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteJobTriggerRequest, - dict, -]) -def test_delete_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_job_trigger(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_trigger_rest_required_fields(request_type=dlp.DeleteJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_job_trigger") as pre: - pre.assert_not_called() - pb_message = 
dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_job_trigger(request) - - -def test_delete_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_delete_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - - -def test_delete_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ActivateJobTriggerRequest, - dict, -]) -def test_activate_job_trigger_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.activate_job_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_activate_job_trigger_rest_required_fields(request_type=dlp.ActivateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.activate_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_activate_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_activate_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_activate_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) - - request = dlp.ActivateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - - client.activate_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_activate_job_trigger_rest_bad_request(transport: str = 'rest', request_type=dlp.ActivateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.activate_job_trigger(request) - - -def test_activate_job_trigger_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDlpJobRequest, - dict, -]) -def test_create_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) - - request = dlp.CreateDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - - client.create_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_dlp_job(request) - - -def test_create_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DlpJob() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) - - -def test_create_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - -def test_create_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDlpJobsRequest, - dict, -]) -def test_list_dlp_jobs_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_dlp_jobs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListDlpJobsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_dlp_jobs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_dlp_jobs_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_dlp_jobs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_dlp_jobs_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListDlpJobsResponse.to_json(dlp.ListDlpJobsResponse()) - - request = dlp.ListDlpJobsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListDlpJobsResponse() - - client.list_dlp_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_dlp_jobs_rest_bad_request(transport: str = 'rest', request_type=dlp.ListDlpJobsRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_dlp_jobs(request) - - -def test_list_dlp_jobs_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListDlpJobsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_dlp_jobs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) - - -def test_list_dlp_jobs_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - - -def test_list_dlp_jobs_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_dlp_jobs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in results) - - pages = list(client.list_dlp_jobs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDlpJobRequest, - dict, -]) -def test_get_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) - 
jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dlp_job_rest_interceptors(null_interceptor): - 
transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) - - request = dlp.GetDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - - client.get_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.GetDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_dlp_job(request) - - -def test_get_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) - - -def test_get_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - - -def test_get_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDlpJobRequest, - dict, -]) -def test_delete_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_dlp_job(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_dlp_job(request) - - -def test_delete_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) - - -def test_delete_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - - -def test_delete_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CancelDlpJobRequest, - dict, -]) -def test_cancel_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.cancel_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_cancel_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.cancel_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.CancelDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.cancel_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_cancel_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.CancelDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_dlp_job(request) - - -def test_cancel_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateStoredInfoTypeRequest, - dict, -]) -def test_create_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_stored_info_type(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_create_stored_info_type_rest_required_fields(request_type=dlp.CreateStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "config", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.CreateStoredInfoTypeRequest.pb(dlp.CreateStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - - request = dlp.CreateStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - - client.create_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.CreateStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_stored_info_type(request) - - -def test_create_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) - - -def test_create_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - -def test_create_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateStoredInfoTypeRequest, - dict, -]) -def test_update_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_stored_info_type(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_update_stored_info_type_rest_required_fields(request_type=dlp.UpdateStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.UpdateStoredInfoTypeRequest.pb(dlp.UpdateStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - - request = dlp.UpdateStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - - client.update_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.UpdateStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_stored_info_type(request) - - -def test_update_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_update_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetStoredInfoTypeRequest, - dict, -]) -def test_get_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_stored_info_type(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_get_stored_info_type_rest_required_fields(request_type=dlp.GetStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - - request = dlp.GetStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - - client.get_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.GetStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_stored_info_type(request) - - -def test_get_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_get_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - - -def test_get_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListStoredInfoTypesRequest, - dict, -]) -def test_list_stored_info_types_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_stored_info_types(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_stored_info_types_rest_required_fields(request_type=dlp.ListStoredInfoTypesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListStoredInfoTypesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_stored_info_types(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_stored_info_types_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_stored_info_types._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_stored_info_types_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_stored_info_types") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, 
- "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.ListStoredInfoTypesResponse.to_json(dlp.ListStoredInfoTypesResponse()) - - request = dlp.ListStoredInfoTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListStoredInfoTypesResponse() - - client.list_stored_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_stored_info_types_rest_bad_request(transport: str = 'rest', request_type=dlp.ListStoredInfoTypesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_stored_info_types(request) - - -def test_list_stored_info_types_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListStoredInfoTypesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_stored_info_types(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) - - -def test_list_stored_info_types_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_stored_info_types_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_stored_info_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in results) - - pages = list(client.list_stored_info_types(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteStoredInfoTypeRequest, - dict, -]) -def test_delete_stored_info_type_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_stored_info_type(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_stored_info_type_rest_required_fields(request_type=dlp.DeleteStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type") as pre: - pre.assert_not_called() - pb_message = 
dlp.DeleteStoredInfoTypeRequest.pb(dlp.DeleteStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.DeleteStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_stored_info_type_rest_bad_request(transport: str = 'rest', request_type=dlp.DeleteStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_stored_info_type(request) - - -def test_delete_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_delete_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - - -def test_delete_stored_info_type_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectDlpJobRequest, - dict, -]) -def test_hybrid_inspect_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.hybrid_inspect_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_dlp_job_rest_required_fields(request_type=dlp.HybridInspectDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - 
- # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.hybrid_inspect_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = 
DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) - - request = dlp.HybridInspectDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.HybridInspectResponse() - - client.hybrid_inspect_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_hybrid_inspect_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.HybridInspectDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.hybrid_inspect_dlp_job(request) - - -def test_hybrid_inspect_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.hybrid_inspect_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" % client.transport._host, args[1]) - - -def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - - -def test_hybrid_inspect_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.FinishDlpJobRequest, - dict, -]) -def test_finish_dlp_job_rest(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.finish_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.finish_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_finish_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_finish_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_finish_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = dlp.FinishDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.finish_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_finish_dlp_job_rest_bad_request(transport: str = 'rest', request_type=dlp.FinishDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.finish_dlp_job(request) - - -def test_finish_dlp_job_rest_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DlpServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DlpServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - transports.DlpServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = DlpServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DlpServiceGrpcTransport, - ) - -def test_dlp_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_dlp_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'inspect_content', - 'redact_image', - 'deidentify_content', - 'reidentify_content', - 'list_info_types', - 'create_inspect_template', - 'update_inspect_template', - 'get_inspect_template', - 'list_inspect_templates', - 'delete_inspect_template', - 'create_deidentify_template', - 'update_deidentify_template', - 'get_deidentify_template', - 'list_deidentify_templates', - 'delete_deidentify_template', - 'create_job_trigger', - 'update_job_trigger', - 'hybrid_inspect_job_trigger', - 'get_job_trigger', - 'list_job_triggers', - 'delete_job_trigger', - 'activate_job_trigger', - 'create_dlp_job', - 'list_dlp_jobs', - 'get_dlp_job', - 'delete_dlp_job', - 'cancel_dlp_job', - 'create_stored_info_type', - 'update_stored_info_type', - 'get_stored_info_type', - 'list_stored_info_types', - 'delete_stored_info_type', - 'hybrid_inspect_dlp_job', - 'finish_dlp_job', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_dlp_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DlpServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - 
-def test_dlp_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DlpServiceTransport() - adc.assert_called_once() - - -def test_dlp_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DlpServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - ], -) -def test_dlp_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - transports.DlpServiceRestTransport, - ], -) -def test_dlp_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DlpServiceGrpcTransport, grpc_helpers), - (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dlp.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dlp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_dlp_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DlpServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dlp_service_host_no_port(transport_name): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dlp.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dlp_service_host_with_port(transport_name): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dlp.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dlp.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def 
test_dlp_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DlpServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DlpServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.inspect_content._session - session2 = client2.transport.inspect_content._session - assert session1 != session2 - session1 = client1.transport.redact_image._session - session2 = client2.transport.redact_image._session - assert session1 != session2 - session1 = client1.transport.deidentify_content._session - session2 = client2.transport.deidentify_content._session - assert session1 != session2 - session1 = client1.transport.reidentify_content._session - session2 = client2.transport.reidentify_content._session - assert session1 != session2 - session1 = client1.transport.list_info_types._session - session2 = client2.transport.list_info_types._session - assert session1 != session2 - session1 = client1.transport.create_inspect_template._session - session2 = client2.transport.create_inspect_template._session - assert session1 != session2 - session1 = client1.transport.update_inspect_template._session - session2 = client2.transport.update_inspect_template._session - assert session1 != session2 - session1 = client1.transport.get_inspect_template._session - session2 = client2.transport.get_inspect_template._session - assert session1 != session2 - session1 = client1.transport.list_inspect_templates._session - session2 = client2.transport.list_inspect_templates._session - assert session1 != session2 - session1 = client1.transport.delete_inspect_template._session - session2 = client2.transport.delete_inspect_template._session - assert session1 != session2 - session1 = client1.transport.create_deidentify_template._session - session2 = client2.transport.create_deidentify_template._session - assert session1 != session2 - 
session1 = client1.transport.update_deidentify_template._session - session2 = client2.transport.update_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.get_deidentify_template._session - session2 = client2.transport.get_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.list_deidentify_templates._session - session2 = client2.transport.list_deidentify_templates._session - assert session1 != session2 - session1 = client1.transport.delete_deidentify_template._session - session2 = client2.transport.delete_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.create_job_trigger._session - session2 = client2.transport.create_job_trigger._session - assert session1 != session2 - session1 = client1.transport.update_job_trigger._session - session2 = client2.transport.update_job_trigger._session - assert session1 != session2 - session1 = client1.transport.hybrid_inspect_job_trigger._session - session2 = client2.transport.hybrid_inspect_job_trigger._session - assert session1 != session2 - session1 = client1.transport.get_job_trigger._session - session2 = client2.transport.get_job_trigger._session - assert session1 != session2 - session1 = client1.transport.list_job_triggers._session - session2 = client2.transport.list_job_triggers._session - assert session1 != session2 - session1 = client1.transport.delete_job_trigger._session - session2 = client2.transport.delete_job_trigger._session - assert session1 != session2 - session1 = client1.transport.activate_job_trigger._session - session2 = client2.transport.activate_job_trigger._session - assert session1 != session2 - session1 = client1.transport.create_dlp_job._session - session2 = client2.transport.create_dlp_job._session - assert session1 != session2 - session1 = client1.transport.list_dlp_jobs._session - session2 = client2.transport.list_dlp_jobs._session - assert session1 != session2 - session1 = 
client1.transport.get_dlp_job._session - session2 = client2.transport.get_dlp_job._session - assert session1 != session2 - session1 = client1.transport.delete_dlp_job._session - session2 = client2.transport.delete_dlp_job._session - assert session1 != session2 - session1 = client1.transport.cancel_dlp_job._session - session2 = client2.transport.cancel_dlp_job._session - assert session1 != session2 - session1 = client1.transport.create_stored_info_type._session - session2 = client2.transport.create_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.update_stored_info_type._session - session2 = client2.transport.update_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.get_stored_info_type._session - session2 = client2.transport.get_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.list_stored_info_types._session - session2 = client2.transport.list_stored_info_types._session - assert session1 != session2 - session1 = client1.transport.delete_stored_info_type._session - session2 = client2.transport.delete_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.hybrid_inspect_dlp_job._session - session2 = client2.transport.hybrid_inspect_dlp_job._session - assert session1 != session2 - session1 = client1.transport.finish_dlp_job._session - session2 = client2.transport.finish_dlp_job._session - assert session1 != session2 -def test_dlp_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.DlpServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_dlp_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DlpServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - 
credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_deidentify_template_path(): - organization = "squid" - deidentify_template = "clam" - expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) - 
assert expected == actual - - -def test_parse_deidentify_template_path(): - expected = { - "organization": "whelk", - "deidentify_template": "octopus", - } - path = DlpServiceClient.deidentify_template_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_deidentify_template_path(path) - assert expected == actual - -def test_dlp_content_path(): - project = "oyster" - expected = "projects/{project}/dlpContent".format(project=project, ) - actual = DlpServiceClient.dlp_content_path(project) - assert expected == actual - - -def test_parse_dlp_content_path(): - expected = { - "project": "nudibranch", - } - path = DlpServiceClient.dlp_content_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_dlp_content_path(path) - assert expected == actual - -def test_dlp_job_path(): - project = "cuttlefish" - dlp_job = "mussel" - expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - actual = DlpServiceClient.dlp_job_path(project, dlp_job) - assert expected == actual - - -def test_parse_dlp_job_path(): - expected = { - "project": "winkle", - "dlp_job": "nautilus", - } - path = DlpServiceClient.dlp_job_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_dlp_job_path(path) - assert expected == actual - -def test_finding_path(): - project = "scallop" - location = "abalone" - finding = "squid" - expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - actual = DlpServiceClient.finding_path(project, location, finding) - assert expected == actual - - -def test_parse_finding_path(): - expected = { - "project": "clam", - "location": "whelk", - "finding": "octopus", - } - path = DlpServiceClient.finding_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_finding_path(path) - assert expected == actual - -def test_inspect_template_path(): - organization = "oyster" - inspect_template = "nudibranch" - expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - actual = DlpServiceClient.inspect_template_path(organization, inspect_template) - assert expected == actual - - -def test_parse_inspect_template_path(): - expected = { - "organization": "cuttlefish", - "inspect_template": "mussel", - } - path = DlpServiceClient.inspect_template_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_inspect_template_path(path) - assert expected == actual - -def test_job_trigger_path(): - project = "winkle" - job_trigger = "nautilus" - expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - actual = DlpServiceClient.job_trigger_path(project, job_trigger) - assert expected == actual - - -def test_parse_job_trigger_path(): - expected = { - "project": "scallop", - "job_trigger": "abalone", - } - path = DlpServiceClient.job_trigger_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_job_trigger_path(path) - assert expected == actual - -def test_stored_info_type_path(): - organization = "squid" - stored_info_type = "clam" - expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) - assert expected == actual - - -def test_parse_stored_info_type_path(): - expected = { - "organization": "whelk", - "stored_info_type": "octopus", - } - path = DlpServiceClient.stored_info_type_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_stored_info_type_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DlpServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = DlpServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = DlpServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = DlpServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DlpServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = DlpServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = DlpServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = DlpServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DlpServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = DlpServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DlpServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) From 9ac1782e792ae4c15dd48d9953b580e7e624ff87 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 28 Feb 2023 06:42:56 -0500 Subject: [PATCH 7/7] revert --- google/cloud/dlp_v2/types/storage.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/google/cloud/dlp_v2/types/storage.py b/google/cloud/dlp_v2/types/storage.py index 745d30c9..1d8ac35d 100644 --- a/google/cloud/dlp_v2/types/storage.py +++ b/google/cloud/dlp_v2/types/storage.py @@ -112,11 +112,11 @@ class FileType(proto.Enum): WORD (5): Word files >30 MB will be scanned as binary files. Included file extensions: - docx, dotx, docm, dotm + docx, dotx, docm, dotm PDF (6): PDF files >30 MB will be scanned as binary files. 
Included file extensions: - pdf + pdf AVRO (7): Included file extensions: avro @@ -129,11 +129,11 @@ class FileType(proto.Enum): POWERPOINT (11): Powerpoint files >30 MB will be scanned as binary files. Included file extensions: - pptx, pptm, potx, potm, pot + pptx, pptm, potx, potm, pot EXCEL (12): Excel files >30 MB will be scanned as binary files. Included file extensions: - xlsx, xlsm, xltx, xltm + xlsx, xlsm, xltx, xltm """ FILE_TYPE_UNSPECIFIED = 0 BINARY_FILE = 1